Friday, February 11, 2022

RealityKit: Detecting a Finger Touching an Object

  1. In Reality Composer, open the object's Properties panel and, under Physics, check Participates. This gives the entity a collision shape, which the ARView entity(at:) hit test used later requires. A minimal wiring sketch follows.
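
  The sketch below is an assumption, not from the original post: it presumes the default Box scene in Experience.rcproject (whose generated loader is Experience.loadBox()) and the ARSessionHelper class from the sample code at the end of this post. The host controller name is hypothetical.

    import ARKit
    import RealityKit
    import UIKit

    final class TouchDemoViewController: UIViewController {   // hypothetical host
        private let sessionHelper = ARSessionHelper()

        override func viewDidLoad() {
            super.viewDidLoad()
            let arView = ARView(frame: view.bounds)
            view.addSubview(arView)

            // Load the scene generated from Experience.rcproject.
            let boxAnchor = try! Experience.loadBox()
            arView.scene.anchors.append(boxAnchor)

            // Wire the helper so session(_:didUpdate:) receives camera frames.
            sessionHelper.arView = arView
            sessionHelper.box = boxAnchor
            arView.session.delegate = sessionHelper
        }
    }
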
  2. Grab ARFrame.capturedImage (already a CVPixelBuffer) and retain it for Vision

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        guard let arView = arView else { return }
        // Do not enqueue other buffers for processing while another Vision task is still running.
        // The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
        guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
            return
        }
        // Retain the image buffer for Vision processing.
        self.currentBuffer = frame.capturedImage
    }
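
  The guard above admits a new frame only while currentBuffer is nil, so the buffer must be cleared once Vision finishes or no further frames will ever be processed. The full sample at the end releases it with a defer at the top of the Vision work item:

    visionQueue.async {
        // Release the pixel buffer when done, allowing the next buffer to be processed.
        defer { self.currentBuffer = nil }
        // ... perform the Vision request (step 3) ...
    }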

  3. Use Vision to detect the index finger (the request configuration this assumes is shown after the snippet)

    let requestHandler = VNImageRequestHandler(cvPixelBuffer: currentBuffer, orientation: orientation)
    visionQueue.async {
        do {
            // Perform VNDetectHumanHandPoseRequest.
            try requestHandler.perform([self.handPoseRequest])
            // Continue only when a hand was detected in the frame.
            // Because maximumHandCount is 1, there is at most one observation.
            guard let observation = self.handPoseRequest.results?.first else {
                return
            }
            // Get points for the index finger.
            let indexFingerPoints = try observation.recognizedPoints(.indexFinger)
            // Look for the tip point.
            guard let indexTipPoint = indexFingerPoints[.indexTip] else {
                return
            }
            // Ignore low-confidence points.
            guard indexTipPoint.confidence > 0.3 else {
                return
            }
        } catch {
            print("Error: Vision request failed with error \"\(error)\"")
        }
    }
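
  The single-observation comment assumes the request was configured for one hand; VNDetectHumanHandPoseRequest defaults to tracking two. Set maximumHandCount explicitly where the request is created (the full sample below does this):

    let handPoseRequest = VNDetectHumanHandPoseRequest()
    // Detect at most one hand so results contains at most one observation.
    handPoseRequest.maximumHandCount = 1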

  4. Convert the Vision coordinates to ARView coordinates

    // Convert points from Vision coordinates to ARView coordinates.
    let point = indexTipPoint.location
    let tip = CGPoint(x: point.x * width, y: (1 - point.y) * height)
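
  The (1 - point.y) flip is needed because Vision's normalized coordinates put the origin at the lower left, while UIKit and ARView put it at the upper left. Multiplying by the raw view size assumes the camera image fills the view at roughly the same aspect ratio; that is the approximation this post uses. Wrapped in a small helper (hypothetical name, not from the post):

    func convertToViewPoint(_ visionPoint: CGPoint, viewSize: CGSize) -> CGPoint {
        CGPoint(x: visionPoint.x * viewSize.width,
                y: (1 - visionPoint.y) * viewSize.height)
    }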


Sample code

import ARKit
import RealityKit
import UIKit
import Vision


class ARSessionHelper: NSObject, ARSessionDelegate {
    weak var arView: ARView?
    weak var box: Experience.Box?

    // The pixel buffer being held for analysis; used to serialize Vision requests.
    private var currentBuffer: CVPixelBuffer?

    // Serial queue for dispatching Vision requests.
    private let visionQueue = DispatchQueue(label: "com.example.apple-samplecode.ARKitVision.serialVisionQueue")

    private let handPoseRequest: VNDetectHumanHandPoseRequest = {
        let request = VNDetectHumanHandPoseRequest()
        // Detect at most one hand so results contains at most one observation.
        request.maximumHandCount = 1
        return request
    }()

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        guard let arView = arView else { return }
        // Do not enqueue other buffers for processing while another Vision task is still running.
        // The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
        guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
            return
        }
        // Retain the image buffer for Vision processing.
        let currentBuffer = frame.capturedImage

        // Most computer vision tasks are not rotation-agnostic, so pass in the orientation of the image with respect to the device.
        let orientation = CGImagePropertyOrientation(isUsingFrontFacingCamera: false)
        // Read the view size on the delegate (main) queue; it is used later on the Vision queue.
        let width = arView.frame.width
        let height = arView.frame.height

        self.currentBuffer = currentBuffer
        let requestHandler = VNImageRequestHandler(cvPixelBuffer: currentBuffer, orientation: orientation)
        visionQueue.async {
            do {
                // Release the pixel buffer when done, allowing the next buffer to be processed.
                defer { self.currentBuffer = nil }
                // Perform VNDetectHumanHandPoseRequest.
                try requestHandler.perform([self.handPoseRequest])
                // Continue only when a hand was detected in the frame.
                // Because maximumHandCount is 1, there is at most one observation.
                guard let observation = self.handPoseRequest.results?.first else {
                    return
                }
                // Get points for the index finger.
                let indexFingerPoints = try observation.recognizedPoints(.indexFinger)
                // Look for the tip point.
                guard let indexTipPoint = indexFingerPoints[.indexTip] else {
                    return
                }
                // Ignore low-confidence points.
                guard indexTipPoint.confidence > 0.3 else {
                    return
                }

                // Convert points from Vision coordinates to ARView coordinates.
                let point = indexTipPoint.location
                let tip = CGPoint(x: point.x * width, y: (1 - point.y) * height)

                DispatchQueue.main.async {
                    // ARView hit testing runs on the main thread.
                    guard arView.entity(at: tip) == self.box?.steelBox else { return }
                    self.box?.notifications.notificationTrigger.post()
                }
            } catch {
                print("Error: Vision request failed with error \"\(error)\"")
            }
        }
    }
}


extension CGImagePropertyOrientation {
    init(isUsingFrontFacingCamera: Bool, deviceOrientation: UIDeviceOrientation = UIDevice.current.orientation) {
        switch deviceOrientation {
        case .portraitUpsideDown:
            self = .left
        case .landscapeLeft:
            self = isUsingFrontFacingCamera ? .down : .up
        case .landscapeRight:
            self = isUsingFrontFacingCamera ? .up : .down
        default:
            self = .right
        }
    }
}
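
For reference, in portrait with the rear camera (the case the sample relies on, handled by the default branch), this initializer yields .right, the orientation value Apple's Vision samples pass for portrait rear-camera buffers:

    let orientation = CGImagePropertyOrientation(isUsingFrontFacingCamera: false,
                                                 deviceOrientation: .portrait)
    // orientation == .right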

