ARKit image recognition comes out in black and white

Posted: 2018-07-09 07:28:44

Tags: ios swift xcode image arkit

I am having a problem with image recognition. As you can see from the video I attached, when I frame the desired picture, the app then shows me a black-and-white image.

Here is the code I am using. My goal is to recognise the image of a specific painting and then view it in colour in AR.

PS: the image used for recognition was added to the asset catalogue as a colour PNG.

Watch this video

import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSCNViewDelegate, ARSessionDelegate {
    // MARK: - IBOutlets

    @IBOutlet weak var sessionInfoLabel: UILabel!
    @IBOutlet weak var sceneView: ARSCNView!

    // MARK: - View Life Cycle

    /// - Tag: StartARSession
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        guard ARWorldTrackingConfiguration.isSupported else {
            fatalError("""
                ARKit is not available on this device. For apps that require ARKit
                for core functionality, use the `arkit` key in the
                `UIRequiredDeviceCapabilities` section of the Info.plist to prevent
                the app from installing. (If the app can't be installed, this error
                can't be triggered in a production scenario.)
                In apps where AR is an additive feature, use `isSupported` to
                determine whether to show UI for launching AR experiences.
            """) // For details, see https://developer.apple.com/documentation/arkit
        }

        // Start the view's AR session with a configuration that uses the rear camera,
        // device position and orientation tracking, and plane detection.
        let configuration = ARWorldTrackingConfiguration()
        configuration.planeDetection = [.horizontal, .vertical]
        sceneView.session.run(configuration)

        let scanningPanel = UIImageView()
        scanningPanel.backgroundColor = UIColor(white: 0.33, alpha: 0.6)
        scanningPanel.layer.masksToBounds = true
        scanningPanel.frame = CGRect(x: -2, y: self.sceneView.frame.height-270, width: 178, height: 50)
        scanningPanel.layer.cornerRadius = 10

        let scanInfo = UILabel(frame: CGRect(x: 8, y: self.sceneView.frame.height-268, width: 160, height: 45))
        scanInfo.textAlignment = .left
        scanInfo.font = scanInfo.font.withSize(15)
        scanInfo.textColor = UIColor.white
        scanInfo.text = "SCAN A SURFACE"

        self.sceneView.addSubview(scanningPanel)
        self.sceneView.addSubview(scanInfo)


        // Set a delegate to track the number of plane anchors for providing UI feedback.
        sceneView.session.delegate = self

        // Prevent the screen from being dimmed after a while as users will likely
        // have long periods of interaction without touching the screen or buttons.
        UIApplication.shared.isIdleTimerDisabled = true

        // Show debug UI to view performance metrics (e.g. frames per second).
        sceneView.showsStatistics = true

        resetTracking()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // Pause the view's AR session.
        sceneView.session.pause()
    }

    // MARK: - ARSCNViewDelegate

    var session: ARSession {
        return sceneView.session
    }

    enum MessageType {
        case trackingStateEscalation
        case contentPlacement

        static var all: [MessageType] = [
            .trackingStateEscalation,
            .contentPlacement
        ]
    }

    func cancelScheduledMessage(for messageType: MessageType) {
        timers[messageType]?.invalidate()
        timers[messageType] = nil
    }

    @IBOutlet weak private var messagePanel: UIVisualEffectView!

    private var timers: [MessageType: Timer] = [:]
    private var messageHideTimer: Timer?

    private let displayDuration: TimeInterval = 6

    @IBOutlet weak private var messageLabel: UILabel!

    private func setMessageHidden(_ hide: Bool, animated: Bool) {
        // The panel starts out hidden, so show it before animating opacity.
        messagePanel.isHidden = false

        guard animated else {
            messagePanel.alpha = hide ? 0 : 1
            return
        }

        UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
            self.messagePanel.alpha = hide ? 0 : 1
        }, completion: nil)
    }

    func showMessage(_ text: String, autoHide: Bool = true) {
        // Cancel any previous hide timer.
        messageHideTimer?.invalidate()

        messageLabel.text = text

        // Make sure status is showing.
        setMessageHidden(false, animated: true)

        if autoHide {
            messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
                self?.setMessageHidden(true, animated: true)
            })
        }
    }

    func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
        cancelScheduledMessage(for: messageType)

        let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
            self?.showMessage(text)
            timer.invalidate()
        })
        timers[messageType] = timer
    }


    func resetTracking() {

        guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else {
            fatalError("Missing expected asset catalog resources.")
        }

        let configuration = ARWorldTrackingConfiguration()
        configuration.detectionImages = referenceImages
        session.run(configuration, options: [.resetTracking, .removeExistingAnchors])

        //statusViewController.scheduleMessage("Look around to detect images", inSeconds: 7.5, messageType: .contentPlacement)
    }

    let updateQueue = DispatchQueue(label: Bundle.main.bundleIdentifier! +
        ".serialSceneKitQueue")


    /// - Tag: PlaceARContent
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        // Place content only for anchors found by plane detection.
        guard let imageAnchor = anchor as? ARImageAnchor else { return }
        let referenceImage = imageAnchor.referenceImage
        updateQueue.async {

            // Create a SceneKit plane to visualize the plane anchor using its position and extent.
            let plane = SCNPlane(width: referenceImage.physicalSize.width,
                                 height: referenceImage.physicalSize.height)

            let planeNode = SCNNode(geometry: plane)

            planeNode.geometry?.materials.first?.diffuse.contents = UIImage(named: "Crocefissione")

            plane.firstMaterial?.diffuse.contents = UIImage(named: "Crocefissione")

            //planeNode.geometry?.firstMaterial?.diffuse.contents = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil)

            // `SCNPlane` is vertically oriented in its local coordinate space, but
            // `ARImageAnchor` assumes the image is horizontal in its local space, so
            // rotate the plane to match.
            planeNode.eulerAngles.x = -.pi / 2
            planeNode.runAction(self.imageHighlightAction)

            // Make the plane visualization semitransparent to clearly show real-world placement.
            // Add the plane visualization to the ARKit-managed node so that it tracks
            // changes in the plane anchor as plane estimation continues.
            node.addChildNode(planeNode)
        }

        /// - Tag: UpdateARContent
        func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
            // Update content only for plane anchors and nodes matching the setup created in `renderer(_:didAdd:for:)`.
            guard let planeAnchor = anchor as?  ARPlaneAnchor,
                let planeNode = node.childNodes.first,
                let plane = planeNode.geometry as? SCNPlane
                else { return }

            // Plane estimation may shift the center of a plane relative to its anchor's transform.
            planeNode.simdPosition = float3(planeAnchor.center.x, 0, planeAnchor.center.z)

            // Plane estimation may also extend planes, or remove one plane to merge its extent into another.
            plane.width = CGFloat(planeAnchor.extent.x)
            plane.height = CGFloat(planeAnchor.extent.z)
        }

        // MARK: - ARSessionDelegate

        func session(_ session: ARSession, didAdd anchors: [ARAnchor]) {
            guard let frame = session.currentFrame else { return }
            //updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
        }

        func session(_ session: ARSession, didRemove anchors: [ARAnchor]) {
            guard let frame = session.currentFrame else { return }
            //updateSessionInfoLabel(for: frame, trackingState: frame.camera.trackingState)
        }

        func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
            //updateSessionInfoLabel(for: session.currentFrame!, trackingState: camera.trackingState)
        }

        // MARK: - ARSessionObserver

        func sessionWasInterrupted(_ session: ARSession) {
            // Inform the user that the session has been interrupted, for example, by presenting an overlay.
            sessionInfoLabel.text = "Session was interrupted"
        }

        func sessionInterruptionEnded(_ session: ARSession) {
            // Reset tracking and/or remove existing anchors if consistent tracking is required.
            sessionInfoLabel.text = "Session interruption ended"
            resetTracking()
        }

        func session(_ session: ARSession, didFailWithError error: Error) {
            // Present an error message to the user.
            sessionInfoLabel.text = "Session failed: \(error.localizedDescription)"
            resetTracking()
        }

        // MARK: - Private methods

        func updateSessionInfoLabel(for frame: ARFrame, trackingState: ARCamera.TrackingState) {
            // Update the UI to provide feedback on the state of the AR experience.
            let message: String

            switch trackingState {
            case .normal where frame.anchors.isEmpty:
                // No planes detected; provide instructions for this app's AR interactions.
                message = "Move the device around to detect horizontal surfaces."

            case .notAvailable:
                message = "Tracking unavailable."

            case .limited(.excessiveMotion):
                message = "Tracking limited - Move the device more slowly."

            case .limited(.insufficientFeatures):
                message = "Tracking limited - Point the device at an area with visible surface detail, or improve lighting conditions."

            case .limited(.initializing):
                message = "Initializing AR session."

            default:
                // No feedback needed when tracking is normal and planes are visible.
                // (Nor when in unreachable limited-tracking states.)
                message = ""
            }
        }

        func resetTrackin() {
            let configuration = ARWorldTrackingConfiguration()
            configuration.planeDetection = .horizontal
            sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
        }
    }
    var imageHighlightAction: SCNAction {
        return .sequence([
            .wait(duration: 100.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOpacity(to: 0, duration: 0.25),
            .fadeOut(duration: 0.5),
            .removeFromParentNode()
            ])
    }
}

1 Answer:

Answer #0 (score: 0):

I am well aware this is very late, and chances are you have solved the issue by now, but while answering another question, Get ARReferenceImage, I ran into the same problem as you.

Looking at your code, I am assuming that the following line refers to your actual Crocefissione painting:

plane.firstMaterial?.diffuse.contents = UIImage(named: "Crocefissione")

If that is the case, then I believe some magic happens behind the scenes: Xcode converts the images in your AR Resources group into ARReferenceImages and, it seems, turns them into grayscale, which appears to be how ARKit is able to detect them.
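To see this split between the detection image and whatever you choose to display, you can log which reference image was matched when the anchor arrives. This is only a small diagnostic sketch (the print statement is illustrative and assumes you gave the image a name in the AR Resources group):

// Inside renderer(_:didAdd:for:), right after the cast to ARImageAnchor:
let matchedName = imageAnchor.referenceImage.name ?? "unnamed"
let size = imageAnchor.referenceImage.physicalSize
print("ARKit matched reference image '\(matchedName)' (\(size.width) m x \(size.height) m)")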

Please let me know if I am wrong ^_______^.

Anyway, a possible solution therefore is to do something like the following.

Copy each of your ARReferenceImages into the Assets catalogue as well, and give the coloured copy a prefix, e.g. Colour(yourImageName).

Then you would need to change your function slightly so that it displays the correctly coloured image, e.g. by loading the asset with that prefix:
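Something along these lines. This is only a minimal sketch of the idea, not a drop-in replacement: it assumes each reference image in the AR Resources group has a name (e.g. Crocefissione) and that its colour copy in the Assets catalogue is that name with a Colour prefix (e.g. ColourCrocefissione); the updateQueue dispatch and the highlight action from the question are omitted for brevity.

func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    guard let imageAnchor = anchor as? ARImageAnchor else { return }
    let referenceImage = imageAnchor.referenceImage

    // A plane with the same physical size as the detected painting.
    let plane = SCNPlane(width: referenceImage.physicalSize.width,
                         height: referenceImage.physicalSize.height)

    // Texture the plane with the full-colour copy from the Assets catalogue
    // (prefix + reference image name) rather than reusing the detection image.
    let colouredName = "Colour" + (referenceImage.name ?? "")
    plane.firstMaterial?.diffuse.contents = UIImage(named: colouredName)

    let planeNode = SCNNode(geometry: plane)

    // SCNPlane is vertical in its local space; lay it flat onto the detected image.
    planeNode.eulerAngles.x = -.pi / 2
    node.addChildNode(planeNode)
}

With that naming, detecting the grayscale Crocefissione reference would texture the plane with the coloured ColourCrocefissione asset.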