I am doing object detection and used UIViewControllerRepresentable to add my view controller. The problem is that I can't pass data from my ViewController to my SwiftUI view, although I can print it.
Can someone help me? This is my code:
import SwiftUI
import AVKit
import UIKit
import Vision
let SVWidth = UIScreen.main.bounds.width
struct MaskDetectionView: View {
    let hasMaskColor = Color.green
    let noMaskColor = Color.red
    let shadowColor = Color.gray

    var body: some View {
        VStack(alignment: .center) {
            VStack(alignment: .center) {
                Text("Please place your head inside the bounded box.")
                    .font(.system(size: 15, weight: .regular, design: .default))
                Text("For better result, show your entire face.")
                    .font(.system(size: 15, weight: .regular, design: .default))
            }.padding(.top, 10)

            VStack(alignment: .center) {
                SwiftUIViewController()
                    .frame(width: SVWidth - 30, height: SVWidth + 30, alignment: .center)
                    .background(Color.white)
                    .cornerRadius(25)
                    .shadow(color: hasMaskColor, radius: 7, x: 0, y: 0)
                    .padding(.top, 30)
                Spacer()
                /// VALUE HERE
            }
        }.padding()
    }
}

struct MaskDetectionView_Previews: PreviewProvider {
    static var previews: some View {
        MaskDetectionView()
    }
}
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    var result = String()
    // ALL THE OBJECTS

    override func viewDidLoad() {
        super.viewDidLoad()

        // 1 - start session
        let capture_session = AVCaptureSession()
        //capture_session.sessionPreset = .vga640x480

        // 2 - set the device front & add input
        guard let capture_device = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: .video, position: .front) else { return }
        guard let input = try? AVCaptureDeviceInput(device: capture_device) else { return }
        capture_session.addInput(input)

        // 3 - the layer on screen that shows the picture
        let previewLayer = AVCaptureVideoPreviewLayer(session: capture_session)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame.size = CGSize(width: SVWidth, height: SVWidth + 40)
        previewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill

        // 4 - run the session
        capture_session.startRunning()

        // 5 - the produced output aka image or video
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        capture_session.addOutput(dataOutput)
    }

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // our model
        guard let model = try? VNCoreMLModel(for: SqueezeNet(configuration: MLModelConfiguration()).model) else { return }

        // request for our model
        let request = VNCoreMLRequest(model: model) { (finishedReq, err) in
            if let error = err {
                print("failed to detect faces:", error)
                return
            }
            // result
            guard let results = finishedReq.results as? [VNClassificationObservation] else { return }
            guard let first_observation = results.first else { return }
            self.result = first_observation.identifier
            print(self.result)
        }

        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }
}
struct SwiftUIViewController: UIViewControllerRepresentable {
    func makeUIViewController(context: Context) -> ViewController {
        return ViewController()
    }

    func updateUIViewController(_ uiViewController: ViewController, context: Context) {
    }
}
The idiomatic way is to circulate a Binding instance through the UI hierarchy, covering both the SwiftUI and the UIKit code. The Binding will transparently update the data on all views connected to it, regardless of which side made the change.
The data flow looks roughly like this: the ViewController reports each new classification to its delegate (the Coordinator), the Coordinator writes the value into the Binding, and the Binding updates the @State that drives the SwiftUI view.
Getting to the implementation details: first of all, you need a @State to store the data coming from the UIKit side, so that updates made by the view controller are reflected in the view:
struct MaskDetectionView: View {
    @State var classificationIdentifier: String = ""
Next, you need to pass this to both the view controller (as a binding it can write to) and to the SwiftUI view that displays it:
var body: some View {
    ...
    SwiftUIViewController(identifier: $classificationIdentifier)
    ...
    // this is the "VALUE HERE" from your question
    Text("Classification identifier: \(classificationIdentifier)")
Now that you are properly injecting the binding, you'll need to update the UIKit side of the code so that the binding can be received.
Update your view representable to look something like this:
struct SwiftUIViewController: UIViewControllerRepresentable {
    // this is the binding that is received from the SwiftUI side
    let identifier: Binding<String>

    // this will be the delegate of the view controller; its role is to allow
    // the data transfer from UIKit to SwiftUI
    class Coordinator: ViewControllerDelegate {
        let identifierBinding: Binding<String>

        init(identifierBinding: Binding<String>) {
            self.identifierBinding = identifierBinding
        }

        func classificationOccurred(_ viewController: ViewController, identifier: String) {
            // whenever the view controller notifies its delegate about a new identifier,
            // the line below will propagate the change up to SwiftUI
            identifierBinding.wrappedValue = identifier
        }
    }

    func makeUIViewController(context: Context) -> ViewController {
        let vc = ViewController()
        vc.delegate = context.coordinator
        return vc
    }

    func updateUIViewController(_ uiViewController: ViewController, context: Context) {
        // update the controller data, if needed
    }

    // this is very important: the coordinator built here is what `makeUIViewController`
    // receives as `context.coordinator`
    func makeCoordinator() -> Coordinator {
        Coordinator(identifierBinding: identifier)
    }
}
The last piece of the puzzle is to write the code for the view controller delegate, and the code that makes use of that delegate:
protocol ViewControllerDelegate: AnyObject {
    func classificationOccurred(_ viewController: ViewController, identifier: String)
}

class ViewController: UIViewController {
    weak var delegate: ViewControllerDelegate?

    ...

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        ...
        print(self.result)

        // let's tell the delegate we found a new classification;
        // the delegate, aka the Coordinator, will then update the Binding,
        // the Binding will update the State, and this change will be
        // propagated to the Text() element in the SwiftUI view
        delegate?.classificationOccurred(self, identifier: self.result)
    }
}
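One detail worth keeping in mind: captureOutput(_:didOutput:from:) is delivered on the background "videoQueue" you passed to setSampleBufferDelegate, so the delegate call, and therefore the binding update, happens off the main thread, while SwiftUI state should only be mutated on the main thread. A minimal way to handle that, assuming the Coordinator shown above, is to hop to the main queue before writing to the binding:
func classificationOccurred(_ viewController: ViewController, identifier: String) {
    // the delegate is called from the capture session's video queue,
    // so dispatch the state mutation to the main queue
    DispatchQueue.main.async {
        self.identifierBinding.wrappedValue = identifier
    }
}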