My Swift code below tries to constrain the previewLayer to the bounds of a UIView named cameraView, but that is not happening: the previewLayer ends up much bigger than the cameraView. The cameraView is positioned via storyboard constraints. I just want the two objects to have the same position and size.
import AVFoundation
import UIKit

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    }

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewWillAppear(_ animated: Bool) {
        let devices = AVCaptureDevice.devices(for: AVMediaType.video)
        for device in devices {
            if device.position == AVCaptureDevice.Position.front {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecType.jpeg]
                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                        }
                        captureSession.addOutput(movieOutput)
                        captureSession.startRunning()

                        _ = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
                        let documentURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
                        let fileURL = documentURL.appendingPathComponent("tempImage.jpg")
                        try? FileManager.default.removeItem(at: fileURL)
                        movieOutput.startRecording(to: fileURL, recordingDelegate: self)

                        let deadlineTime = DispatchTime.now() + .seconds(1)
                        DispatchQueue.main.asyncAfter(deadline: deadlineTime) {
                            print("test")
                            self.movieOutput.stopRecording()
                        }
                    }
                } catch {
                    print("Error")
                }
            }
            viewDidAppear(animated)
        }
    }

    func viewdidappeare(_ animated: Bool) {
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        self.previewLayer.frame = self.cameraView.bounds
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        previewLayer.connection!.videoOrientation = AVCaptureVideoOrientation.portrait
        self.cameraView.layer.insertSublayer(self.previewLayer, at: 0)
    }

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
        print("FINISHED \(error)")
        // save video to camera roll
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil)
        }
    }
}
You can try it as follows. In viewWillAppear the view has not been laid out yet, so cameraView.bounds is not its final size; set the preview layer's frame in viewDidAppear instead, after Auto Layout has run:
override func viewDidAppear(_ animated: Bool) {
    super.viewDidAppear(animated)
    self.previewLayer.frame = self.cameraView.bounds
}
And create and attach the preview layer in viewWillAppear, once the session outputs are configured:
previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
previewLayer.connection!.videoOrientation = AVCaptureVideoOrientation.portrait
self.cameraView.layer.insertSublayer(self.previewLayer, at: 0)
So your full updated code:
import AVFoundation
import UIKit

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate {

    var captureSession = AVCaptureSession()
    var sessionOutput = AVCaptureStillImageOutput()
    var movieOutput = AVCaptureMovieFileOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()

    @IBOutlet var cameraView: UIView!

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        // Auto Layout has finished by this point, so cameraView.bounds is final.
        self.previewLayer.frame = self.cameraView.bounds
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let devices = AVCaptureDevice.devices(for: AVMediaType.video)
        for device in devices {
            if device.position == AVCaptureDevice.Position.front {
                do {
                    let input = try AVCaptureDeviceInput(device: device)
                    if captureSession.canAddInput(input) {
                        captureSession.addInput(input)
                        sessionOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecType.jpeg]
                        if captureSession.canAddOutput(sessionOutput) {
                            captureSession.addOutput(sessionOutput)
                            // Create the preview layer once the session is configured
                            // and insert it at the back of cameraView's layer stack.
                            previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
                            previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
                            previewLayer.connection!.videoOrientation = AVCaptureVideoOrientation.portrait
                            self.cameraView.layer.insertSublayer(self.previewLayer, at: 0)
                        }
                        captureSession.addOutput(movieOutput)
                        captureSession.startRunning()

                        // Record roughly one second of video to a temporary file in Documents.
                        let documentURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
                        // Use a movie extension; the output is a QuickTime movie, not a JPEG.
                        let fileURL = documentURL.appendingPathComponent("tempMovie.mov")
                        try? FileManager.default.removeItem(at: fileURL)
                        movieOutput.startRecording(to: fileURL, recordingDelegate: self)

                        let deadlineTime = DispatchTime.now() + .seconds(1)
                        DispatchQueue.main.asyncAfter(deadline: deadlineTime) {
                            print("test")
                            self.movieOutput.stopRecording()
                        }
                    }
                } catch {
                    print("Error")
                }
            }
        }
    }

    // AVCaptureFileOutputRecordingDelegate: the old Swift 2 captureOutput(...)
    // signature is never called in Swift 4, so the saving code belongs here.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        print("FINISHED \(String(describing: error))")
        // Save the recorded video to the camera roll.
        if error == nil {
            UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil)
        }
    }
}
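One more refinement: setting the frame once in viewDidAppear works only as long as cameraView never changes size afterwards. If it can change (device rotation, size classes), a more robust home for that line is viewDidLayoutSubviews, which UIKit calls every time Auto Layout updates the view's bounds. A minimal sketch, using the same previewLayer and cameraView properties as above:

    // Layer frames are not managed by Auto Layout, so re-sync the preview
    // layer manually whenever the view is laid out again, e.g. on rotation.
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer.frame = cameraView.bounds
    }

Also make sure Info.plist contains NSCameraUsageDescription (and a photo-library key such as NSPhotoLibraryAddUsageDescription if you save the clip); on iOS 10 and later the app is terminated on first camera access without it.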