For some reason my variables stringy and stringie print to the console just fine, but when I try to set them on a label, they show up as nil.
My goal is to display the string and the float in a label on the view controller, but it just isn't working.
I think it has something to do with viewDidLoad, as if it's hiding those variables. However, if I try to set the label outside of viewDidLoad, I get a declaration error.
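To show what I mean by the declaration error: I'm assuming it comes from putting the assignment directly at class scope. Wrapped in a method of its own it compiles (a hypothetical updateLabel helper, only to illustrate), but then I don't know where to call it from so that the values aren't nil anymore:
    // Hypothetical helper, only to illustrate: this compiles, whereas a bare
    // `Labele.text = ...` directly in the class body gives "expected declaration".
    func updateLabel() {
        Labele.text = "Guess: \(stringy ?? "?") Certainty: \(stringie ?? 0)"
    }
Here is the full view controller: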
//  ViewController.swift
//  Intellicam
//
import UIKit
import AVKit
import Vision
class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    var stringy: String!
    var stringie: Float!
    override func viewDidLoad() {
        super.viewDidLoad()
        //here we start the camera
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = .photo
        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else {return}
        captureSession.addInput(input)
        captureSession.startRunning()
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame
        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        captureSession.addOutput(dataOutput)
     //   let request = VNCoreMLModel(model: VNCoreMLModel, completionHandler: VNRequestCompletionHandler)
     //   VNImageRequestHandler(cgImage: <#T##CGImage#>, options: <#T##[VNImageOption : Any]#>)
        self.Labele.text = "Guess: \(stringy) + Certainty: \(stringie)"
    }
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
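        // Note: this delegate callback fires on the background "videoQueue" set above, not on the main thread.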
        //print("Camera was able to capture a frame:", Date())
        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {return}
        guard let model = try? VNCoreMLModel(for: Resnet50().model) else {return}
        let request = VNCoreMLRequest(model: model){
            (finishedReq, err) in
            //print(finishedReq.results)
            guard let results = finishedReq.results as? [VNClassificationObservation] else {return}
            guard let firstObservation = results.first else { return }
            //print("Guess: \(firstObservation.identifier) Certainty: \(firstObservation.confidence)%")
            self.stringy = firstObservation.identifier
            self.stringie = firstObservation.confidence
            print(self.stringy)
            print(self.stringie)
        }
        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }
    @IBOutlet weak var Labele: UILabel!
}
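For reference, this is the kind of update I think I need to end up with inside the Vision completion handler (just a sketch reusing the names above; the DispatchQueue.main.async wrapper is my assumption about where UI updates have to happen, since the delegate runs on the video queue):
// Sketch: set the properties, then hop back to the main queue before
// touching the label (assumption: UIKit updates belong on the main thread).
self.stringy = firstObservation.identifier
self.stringie = firstObservation.confidence
DispatchQueue.main.async {
    self.Labele.text = "Guess: \(self.stringy ?? "?") Certainty: \(self.stringie ?? 0)"
}
But I still don't understand why the version in viewDidLoad ends up with nil in the label, so any explanation would be appreciated.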