Xcode IBOutlet 问题 - 线程 1:EXC_BAD_ACCESS

xdnvmnnf  于 2023-01-21  发布在  其他
关注(0)|答案(1)|浏览(84)

我正在做一个基于 Apple's example code 的电话号码识别应用程序,我对 Swift 和一般的编码都很陌生。当我运行这个项目时,在第 68 行 `previewView.session = captureSession` 处得到了 "Thread 1: EXC_BAD_ACCESS (code=257, address=0x7e700019ec0ad79)" 错误。我认为它与第 26 行的 `@IBOutlet weak var previewView: PreviewView!` 有关?
我有一个视图控制器,包含两个视图和一个标签。其中两个 IBOutlet 看起来连接正常,但第 26 行的不一样:`PreviewView!` 这段文字显示为灰色,而不像其他几个那样是蓝色。这可能是问题所在吗?

import AVFoundation
import UIKit
import Vision


/// View controller that drives a live camera preview with a dimmed overlay and
/// a rectangular "cutout" region of interest (ROI) in which text recognition
/// is performed (the actual Vision work is done in a subclass; see the
/// `AVCaptureVideoDataOutputSampleBufferDelegate` extension).
class TextScanViewController: UIViewController {

    // MARK: - UI objects

    // NOTE(review): These outlets are implicitly unwrapped. If one of them is
    // not connected in the storyboard/XIB (check the Connections inspector),
    // the first access — e.g. `previewView.session = captureSession` in
    // viewDidLoad — crashes at runtime (EXC_BAD_ACCESS / unexpectedly found
    // nil). An outlet whose type reads grayed-out in Interface Builder usually
    // means the custom class (here `PreviewView`) is not assigned or the
    // connection is missing.
    @IBOutlet weak var previewView: PreviewView!

    @IBOutlet weak var cutoutView: UIView!

    @IBOutlet weak var numberView: UILabel!

    // Layer used as `cutoutView`'s mask to punch a see-through hole over the ROI.
    var maskLayer = CAShapeLayer()

    // Device orientation. Updated whenever the orientation changes to a
    // different supported orientation.
    var currentOrientation = UIDeviceOrientation.portrait

    // MARK: - Capture related objects

    private let captureSession = AVCaptureSession()

    // Serial queue for capture-session configuration and start/stop calls,
    // keeping that blocking work off the main thread.
    let captureSessionQueue = DispatchQueue(label: "com.example.apple-samplecode.CaptureSessionQueue")

    var captureDevice: AVCaptureDevice?

    var videoDataOutput = AVCaptureVideoDataOutput()

    let videoDataOutputQueue = DispatchQueue(label: "com.example.apple-samplecode.VideoDataOutputQueue")

    // MARK: - Region of interest (ROI) and text orientation

    // Region of video data output buffer that recognition should be run on.
    // Gets recalculated once the bounds of the preview layer are known.
    var regionOfInterest = CGRect(x: 0, y: 0, width: 1, height: 1)

    // Orientation of text to search for in the region of interest.
    var textOrientation = CGImagePropertyOrientation.up

    // MARK: - Coordinate transforms

    // Width/height ratio of the capture buffer. Assigned in setupCamera();
    // remains nil until the camera has been configured.
    var bufferAspectRatio: Double!

    // Transform from UI orientation to buffer orientation.
    var uiRotationTransform = CGAffineTransform.identity

    // Transform bottom-left coordinates to top-left.
    var bottomToTopTransform = CGAffineTransform(scaleX: 1, y: -1).translatedBy(x: 0, y: -1)

    // Transform coordinates in ROI to global coordinates (still normalized).
    var roiToGlobalTransform = CGAffineTransform.identity

    // Vision -> AVF coordinate transform.
    var visionToAVFTransform = CGAffineTransform.identity

    // MARK: - View controller methods

    override func viewDidLoad() {
        super.viewDidLoad()

        // Set up preview view. Crashes here if the `previewView` outlet is not
        // connected — see the NOTE on the outlet declarations above.
        previewView.session = captureSession

        // Set up cutout view: a translucent gray overlay with an even-odd
        // masked hole that updateCutout() positions over the ROI.
        cutoutView.backgroundColor = UIColor.gray.withAlphaComponent(0.5)
        maskLayer.backgroundColor = UIColor.clear.cgColor
        maskLayer.fillRule = .evenOdd
        cutoutView.layer.mask = maskLayer

        // Starting the capture session is a blocking call. Perform setup using
        // a dedicated serial dispatch queue to prevent blocking the main thread.
        captureSessionQueue.async {
            self.setupCamera()

            // Calculate region of interest now that the camera is set up.
            DispatchQueue.main.async {
                // Figure out initial ROI.
                self.calculateRegionOfInterest()
            }
        }
    }

    override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
        super.viewWillTransition(to: size, with: coordinator)

        // Only change the current orientation if the new one is landscape or
        // portrait. You can't really do anything about flat or unknown.
        let deviceOrientation = UIDevice.current.orientation
        if deviceOrientation.isPortrait || deviceOrientation.isLandscape {
            currentOrientation = deviceOrientation
        }

        // Handle device orientation in the preview layer.
        if let videoPreviewLayerConnection = previewView.videoPreviewLayer.connection {
            if let newVideoOrientation = AVCaptureVideoOrientation(deviceOrientation: deviceOrientation) {
                videoPreviewLayerConnection.videoOrientation = newVideoOrientation
            }
        }

        // Orientation changed: figure out new region of interest (ROI).
        calculateRegionOfInterest()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        updateCutout()
    }

    // MARK: - Setup

    /// Recomputes `regionOfInterest` for the current orientation, refreshes the
    /// Vision/AVF transforms, and schedules a cutout update.
    func calculateRegionOfInterest() {
        // BUGFIX: `bufferAspectRatio` is only assigned in setupCamera(), which
        // runs asynchronously on captureSessionQueue. A rotation event
        // (viewWillTransition) arriving before camera setup completes would
        // force-unwrap nil here and crash, so bail out until the aspect ratio
        // is known. viewDidLoad re-invokes this method after setup, so the ROI
        // still gets computed.
        guard let bufferAspectRatio = bufferAspectRatio else { return }

        // In landscape orientation the desired ROI is specified as the ratio of
        // buffer width to height. When the UI is rotated to portrait, keep the
        // vertical size the same (in buffer pixels). Also try to keep the
        // horizontal size the same up to a maximum ratio.
        let desiredHeightRatio = 0.15
        let desiredWidthRatio = 0.6
        let maxPortraitWidth = 0.8

        // Figure out size of ROI (normalized buffer coordinates).
        let size: CGSize
        if currentOrientation.isPortrait || currentOrientation == .unknown {
            size = CGSize(width: min(desiredWidthRatio * bufferAspectRatio, maxPortraitWidth), height: desiredHeightRatio / bufferAspectRatio)
        } else {
            size = CGSize(width: desiredWidthRatio, height: desiredHeightRatio)
        }
        // Make it centered.
        regionOfInterest.origin = CGPoint(x: (1 - size.width) / 2, y: (1 - size.height) / 2)
        regionOfInterest.size = size

        // ROI changed, update transform.
        setupOrientationAndTransform()

        // Update the cutout to match the new ROI.
        DispatchQueue.main.async {
            // Wait for the next run cycle before updating the cutout. This
            // ensures that the preview layer already has its new orientation.
            self.updateCutout()
        }
    }

    /// Repositions the see-through mask hole and the number label to match the
    /// current ROI, converted into preview-layer coordinates.
    func updateCutout() {
        // Figure out where the cutout ends up in layer coordinates.
        let roiRectTransform = bottomToTopTransform.concatenating(uiRotationTransform)
        let cutout = previewView.videoPreviewLayer.layerRectConverted(fromMetadataOutputRect: regionOfInterest.applying(roiRectTransform))

        // Create the mask: outer rect + cutout rect with the even-odd fill rule
        // (set in viewDidLoad) leaves the cutout transparent.
        let path = UIBezierPath(rect: cutoutView.frame)
        path.append(UIBezierPath(rect: cutout))
        maskLayer.path = path.cgPath

        // Move the number view down to under cutout.
        var numFrame = cutout
        numFrame.origin.y += numFrame.size.height
        numberView.frame = numFrame
    }

    /// Recalculates the affine transform between Vision coordinates and AVF
    /// coordinates, plus the text orientation, for `currentOrientation`.
    func setupOrientationAndTransform() {
        // Compensate for region of interest.
        let roi = regionOfInterest
        roiToGlobalTransform = CGAffineTransform(translationX: roi.origin.x, y: roi.origin.y).scaledBy(x: roi.width, y: roi.height)

        // Compensate for orientation (buffers always come in the same orientation).
        switch currentOrientation {
        case .landscapeLeft:
            textOrientation = CGImagePropertyOrientation.up
            uiRotationTransform = CGAffineTransform.identity
        case .landscapeRight:
            textOrientation = CGImagePropertyOrientation.down
            uiRotationTransform = CGAffineTransform(translationX: 1, y: 1).rotated(by: CGFloat.pi)
        case .portraitUpsideDown:
            textOrientation = CGImagePropertyOrientation.left
            uiRotationTransform = CGAffineTransform(translationX: 1, y: 0).rotated(by: CGFloat.pi / 2)
        default: // We default everything else to .portraitUp
            textOrientation = CGImagePropertyOrientation.right
            uiRotationTransform = CGAffineTransform(translationX: 0, y: 1).rotated(by: -CGFloat.pi / 2)
        }

        // Full Vision ROI to AVF transform.
        visionToAVFTransform = roiToGlobalTransform.concatenating(bottomToTopTransform).concatenating(uiRotationTransform)
    }

    /// Configures the capture device, session preset, input, and video data
    /// output, then starts the session. Must run on `captureSessionQueue`.
    func setupCamera() {
        guard let captureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back) else {
            print("Could not create capture device.")
            return
        }
        self.captureDevice = captureDevice

        // NOTE:
        // Requesting 4k buffers allows recognition of smaller text but will
        // consume more power. Use the smallest buffer size necessary to keep
        // down battery usage.
        if captureDevice.supportsSessionPreset(.hd4K3840x2160) {
            captureSession.sessionPreset = AVCaptureSession.Preset.hd4K3840x2160
            bufferAspectRatio = 3840.0 / 2160.0
        } else {
            captureSession.sessionPreset = AVCaptureSession.Preset.hd1920x1080
            bufferAspectRatio = 1920.0 / 1080.0
        }

        guard let deviceInput = try? AVCaptureDeviceInput(device: captureDevice) else {
            print("Could not create device input.")
            return
        }
        if captureSession.canAddInput(deviceInput) {
            captureSession.addInput(deviceInput)
        }

        // Configure video data output.
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
        if captureSession.canAddOutput(videoDataOutput) {
            captureSession.addOutput(videoDataOutput)
            // NOTE:
            // There is a trade-off to be made here. Enabling stabilization will
            // give temporally more stable results and should help the recognizer
            // converge. But if it's enabled the VideoDataOutput buffers don't
            // match what's displayed on screen, which makes drawing bounding
            // boxes very hard. Disable it in this app to allow drawing detected
            // bounding boxes on screen.
            videoDataOutput.connection(with: AVMediaType.video)?.preferredVideoStabilizationMode = .off
        } else {
            print("Could not add VDO output")
            return
        }

        // Set zoom and autofocus to help focus on very small text.
        do {
            try captureDevice.lockForConfiguration()
            captureDevice.videoZoomFactor = 2
            captureDevice.autoFocusRangeRestriction = .near
            captureDevice.unlockForConfiguration()
        } catch {
            print("Could not set zoom level due to error: \(error)")
            return
        }

        captureSession.startRunning()
    }

    // MARK: - UI drawing and interaction

    /// Displays a recognized string and stops the camera.
    func showString(string: String) {
        // Found a definite number.
        // Stop the camera synchronously to ensure that no further buffers are
        // received. Then update the number view asynchronously.
        captureSessionQueue.sync {
            self.captureSession.stopRunning()
            DispatchQueue.main.async {
                self.numberView.text = string
                self.numberView.isHidden = false
            }
        }
    }

    /// Tap to resume scanning after a result has been shown.
    @IBAction func handleTap(_ sender: UITapGestureRecognizer) {
        captureSessionQueue.async {
            if !self.captureSession.isRunning {
                self.captureSession.startRunning()
            }
            DispatchQueue.main.async {
                self.numberView.isHidden = true
            }
        }
    }
}


// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate


extension TextScanViewController: AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Receives camera frames from `videoDataOutput` on `videoDataOutputQueue`.
    /// Intentionally empty here: the Vision text-recognition work is performed
    /// by an override in a subclass (VisionViewController).
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        // This is implemented in VisionViewController.

    }

}


// MARK: - Utility extensions


extension AVCaptureVideoOrientation {
    /// Maps a device orientation to the matching video-connection orientation.
    ///
    /// Landscape is intentionally mirrored: when the device rotates left the
    /// video connection must rotate right (and vice versa) so the picture
    /// stays upright. Fails (`nil`) for flat, unknown, or any other
    /// unsupported orientation.
    init?(deviceOrientation: UIDeviceOrientation) {
        let mapping: [UIDeviceOrientation: AVCaptureVideoOrientation] = [
            .portrait: .portrait,
            .portraitUpsideDown: .portraitUpsideDown,
            .landscapeLeft: .landscapeRight,
            .landscapeRight: .landscapeLeft,
        ]
        guard let videoOrientation = mapping[deviceOrientation] else { return nil }
        self = videoOrientation
    }
}
q7solyqu

q7solyqu1#

检查情节提要(storyboard)或 XIB 中的连接检查器(Connections inspector),确认 previewView 这个 IBOutlet 恰好只有一个连接——它可能被连接了多次,也可能根本没有连接。

相关问题