I am trying to build a camera application. As I am new in iOS, I read documentations, watched tutorials and wrote the following codes:
import UIKit
import AVFoundation
class StartVC: UIViewController {
var connection: AVCaptureConnection!
var output: AVCapturePhotoOutput!
var videoPreviewLayer: AVCaptureVideoPreviewLayer!
@IBOutlet var videoPreviewView: UIView!
override func viewDidLoad() {
super.viewDidLoad()
self.createCamera()
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
self.videoPreviewLayer.bounds = self.videoPreviewView.bounds
self.videoPreviewLayer.position = CGPoint(x: self.videoPreviewView.bounds.midX, y: self.videoPreviewView.bounds.midY)
}
func createCamera() {
let captureSession = AVCaptureSession()
if captureSession.canSetSessionPreset(AVCaptureSessionPresetHigh) {
captureSession.sessionPreset = AVCaptureSessionPresetHigh
} else {
print("Error: Couldn't set preset = \(AVCaptureSessionPresetHigh)")
return
}
let cameraDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
var error: NSError?
let inputDevice = try! AVCaptureDeviceInput.init(device: cameraDevice)
//let inputDevice = AVCaptureDeviceInput.withDevice(cameraDevice) as AVCaptureDeviceInput
if let error = error {
print("Error: \(error)")
return
}
if captureSession.canAddInput(inputDevice) {
captureSession.addInput(inputDevice)
} else {
print("Error: Couldn't add input device")
return
}
let imageOutput = AVCapturePhotoOutput()
if captureSession.canAddOutput(imageOutput) {
captureSession.addOutput(imageOutput)
} else {
print("Error: Couldn't add output")
return
}
// Store imageOutput. We will need it to take photo
self.output = imageOutput
let connection = imageOutput.connections.first as! AVCaptureConnection!
// Store this connection in property. We will need it when we take image.
self.connection = connection
connection?.videoOrientation = AVCaptureVideoOrientation.portrait
captureSession.startRunning()
// This will preview the camera
let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
videoLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill
videoLayer?.contentsScale = UIScreen.main.scale
self.videoPreviewView.layer.addSublayer(videoLayer!)
// Store this layer instance in property. We will place it into a view
self.videoPreviewLayer = videoLayer
}
@IBAction func CaptureButton(_ sender: UIButton) {
let captureSettings = AVCapturePhotoSettings()
let previewPixelType = captureSettings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
kCVPixelBufferWidthKey as String: 300,
kCVPixelBufferHeightKey as String: 300,
]
captureSettings.previewPhotoFormat = previewFormat
self.output.capturePhoto(with: captureSettings, delegate: self as! AVCapturePhotoCaptureDelegate)
}
}
But I am getting the error in this section.
@IBAction func CaptureButton(_ sender: UIButton) {
let captureSettings = AVCapturePhotoSettings()
let previewPixelType = captureSettings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
kCVPixelBufferWidthKey as String: 300,
kCVPixelBufferHeightKey as String: 300,
]
captureSettings.previewPhotoFormat = previewFormat
// For this line
self.output.capturePhoto(with: captureSettings, delegate: self as! AVCapturePhotoCaptureDelegate)
}
The error is saying that it could not cast the value of myViewController to AVCapturePhotoCaptureDelegate
How can I solve this problem?
Try this
class StartVC: UIViewController {
}
extension StartVC: AVCapturePhotoCaptureDelegate{
}
no need of casting
self.output.capturePhoto(with: captureSettings, delegate: self)
It can be also written as
class StartVC: UIViewController,AVCapturePhotoCaptureDelegate {
}