I have a very basic AVFoundation camera with a captureButton that takes a photo and sends it to SecondCameraController to be displayed. My problem is that there is a lot of iOS 10 deprecation around this API, and I'm not sure how to fire the flash when I press the captureButton. Any help would be much appreciated. My code is below. Thank you guys.
import UIKit
import AVFoundation

class CameraController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    let captureSession = AVCaptureSession()
    var previewLayer: CALayer!
    var captureDevice: AVCaptureDevice!
    var takePhoto: Bool = false

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        navigationController?.setNavigationBarHidden(true, animated: true)
    }

    let cameraView: UIView = {
        let view = UIView()
        view.backgroundColor = .red
        return view
    }()

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto
        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        }
    }

    func beginSession() {
        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            captureSession.addInput(captureDeviceInput)
        } catch {
            print(error.localizedDescription)
        }

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            self.view.layer.addSublayer(self.previewLayer)
            self.previewLayer.frame = CGRect(x: 0, y: 0, width: view.frame.width, height: view.frame.height)
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill

            self.view.addSubview(captureButton)
            let width: CGFloat = 85
            captureButton.frame = CGRect(x: (previewLayer.frame.width / 2) - width / 2, y: previewLayer.frame.height - width - 25, width: width, height: width)

            captureSession.startRunning()

            let dataOutput = AVCaptureVideoDataOutput()
            dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
            dataOutput.alwaysDiscardsLateVideoFrames = true

            if captureSession.canAddOutput(dataOutput) {
                captureSession.addOutput(dataOutput)
            }
            captureSession.commitConfiguration()

            let queue = DispatchQueue(label: "com.cheekylabsltd.camera")
            dataOutput.setSampleBufferDelegate(self, queue: queue)
        }
    }

    func handleCapture() {
        takePhoto = true
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
                let secondController = SecondCameraController()
                secondController.takenPhoto = image
                DispatchQueue.main.async {
                    self.present(secondController, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }
        }
    }

    func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()
            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))
            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
            }
        }
        return nil
    }

    func stopCaptureSession() {
        self.captureSession.stopRunning()
        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    lazy var captureButton: UIButton = {
        let button = UIButton(type: .system)
        button.backgroundColor = .white
        button.layer.cornerRadius = 42.5
        button.clipsToBounds = true
        button.alpha = 0.40
        button.layer.borderWidth = 4
        button.layer.borderColor = greenColor.cgColor // greenColor is defined elsewhere in the project
        button.addTarget(self, action: #selector(handleCapture), for: .touchUpInside)
        return button
    }()
}
Try this code (Swift 3.0):
private func flashOn(device: AVCaptureDevice) {
    do {
        if device.hasTorch {
            try device.lockForConfiguration()
            device.torchMode = .on
            device.flashMode = .on
            device.unlockForConfiguration()
        }
    } catch {
        // Disable your flash button here if this fails
        print("Device torch/flash error: \(error)")
    }
}

// FOR FLASH OFF CODE
private func flashOff(device: AVCaptureDevice) {
    do {
        if device.hasTorch {
            try device.lockForConfiguration()
            device.torchMode = .off
            device.flashMode = .off
            device.unlockForConfiguration()
        }
    } catch {
        // Disable your flash button here if this fails
        print("Device torch/flash error: \(error)")
    }
}
//MARK: Flash utility methods
// Assumes the controller owns the capture session, e.g.:
//private let session = AVCaptureSession()

func toggleFlash() {
    var device: AVCaptureDevice!

    if #available(iOS 10.0, *) {
        let videoDeviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDuoCamera], mediaType: AVMediaTypeVideo, position: .unspecified)!
        let devices = videoDeviceDiscoverySession.devices!
        device = devices.first!
    } else {
        // Fallback on earlier versions
        device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
    }

    if device.hasMediaType(AVMediaTypeVideo) && device.hasTorch {
        self.session.beginConfiguration()
        //self.objOverlayView.disableCenterCameraBtn()
        if device.isTorchActive {
            self.flashOff(device: device)
        } else {
            self.flashOn(device: device)
        }
        //self.objOverlayView.enableCenterCameraBtn()
        self.session.commitConfiguration()
    }
}
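
To make the flash fire when you press your captureButton, you can wrap the capture in the two helpers above: torch on just before the frame is grabbed, torch off once you have it. Here is a minimal sketch, assuming flashOn(device:)/flashOff(device:) have been moved into your CameraController next to the captureDevice and takePhoto properties:

func handleCapture() {
    // Sketch only: assumes flashOn(device:)/flashOff(device:) from above
    // live in CameraController; adjust to your own setup.
    if let device = captureDevice, device.hasTorch {
        flashOn(device: device) // light the torch just before grabbing a frame
    }
    takePhoto = true
}

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    if takePhoto {
        takePhoto = false
        if let device = captureDevice, device.hasTorch {
            flashOff(device: device) // frame captured, turn the torch back off
        }
        // ...hand the image to SecondCameraController exactly as before...
    }
}

Note that this drives the torch (a continuous light) rather than a true still-image flash: with an AVCaptureVideoDataOutput pipeline there is no still capture for a flash to sync with. If you later migrate to the non-deprecated iOS 10 capture API, AVCapturePhotoOutput, you would request flash per shot through AVCapturePhotoSettings.flashMode instead.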