如何在 ARKit 3 动作捕捉中获取所有关节 XYZ 坐标

Posted

技术标签:

【中文标题】如何在 ARKit 3 动作捕捉中获取所有关节 XYZ 坐标【英文标题】:How can I get all joint XYZ coordinates in ARKit 3 motion capture 【发布时间】:2020-03-22 03:03:45 【问题描述】:

我正在尝试在 ARKit 3 动作捕捉中获取所有关节的 XYZ 坐标,以便据此计算身体各部位的角度。但我找不到这些 XYZ 坐标,所以无法进行下一步。

我尝试使用 `character!.jointTransforms` 获取关节的变换数据(其中应该包含 XYZ 坐标),但在跟踪一些动作时,这些数值并没有随之改变。

这是我的代码

import UIKit
import RealityKit
import ARKit
import Combine

/// Tracks a human body with ARKit 3 body tracking, drives a rigged 3D character
/// from it, and prints the XYZ position of every tracked joint each frame.
class ViewController: UIViewController, ARSessionDelegate {

    @IBOutlet var arView: ARView!

    /// Loaded asynchronously; stays nil until `loadBodyTrackedAsync` completes.
    var character: BodyTrackedEntity?
    let characterOffset: SIMD3<Float> = [0.0, 0, 0]
    let characterAnchor = AnchorEntity()

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        arView.session.delegate = self

        // Body tracking requires the A12 chip's neural engine.
        guard ARBodyTrackingConfiguration.isSupported else {
            fatalError("This feature is only supported on devices with an A12 chip")
        }

        let configuration = ARBodyTrackingConfiguration()
        arView.session.run(configuration)

        arView.scene.addAnchor(characterAnchor)

        // Asynchronously load the 3D character.
        var cancellable: AnyCancellable? = nil
        cancellable = Entity.loadBodyTrackedAsync(named: "character/robot").sink(
            receiveCompletion: { completion in
                if case let .failure(error) = completion {
                    print("Error: Unable to load model: \(error.localizedDescription)")
                }
                cancellable?.cancel()
            },
            receiveValue: { (character: Entity) in
                if let character = character as? BodyTrackedEntity {
                    // Scale the character to human size
                    character.scale = [1.0, 1.0, 1.0]
                    self.character = character
                    cancellable?.cancel()
                } else {
                    print("Error: Unable to load model as BodyTrackedEntity")
                }
            })
    }

    func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
        for anchor in anchors {
            guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }

            // Place the character at the tracked body's root (hip) position.
            let bodyPosition = simd_make_float3(bodyAnchor.transform.columns.3)
            characterAnchor.position = bodyPosition + characterOffset
            characterAnchor.orientation = Transform(matrix: bodyAnchor.transform).rotation

            if let character = character, character.parent == nil {
                characterAnchor.addChild(character)
            }

            // FIX: read joint data from the tracked skeleton, not from the rendered
            // character. `character.jointTransforms` describes the model's rig and
            // does not update per frame (which is why the numbers never changed) —
            // and force-unwrapping `character!` crashes until the async load finishes.
            // `ARSkeleton3D.jointModelTransforms` is the live tracking data; each
            // transform is expressed relative to the body anchor's root, so the
            // translation column is the joint's XYZ offset from the hip.
            let skeleton = bodyAnchor.skeleton
            let jointXYZ: [SIMD3<Float>] = skeleton.jointModelTransforms.map { transform in
                simd_make_float3(transform.columns.3)
            }

            print(jointXYZ)
        }
    }
}

代码参考

https://developer.apple.com/documentation/arkit/capturing_body_motion_in_3d

【问题讨论】:

【参考方案1】:

查看这个库 PoseKit - https://github.com/d1l4y/PoseKit

import PoseKit


let poseKit = PoseKit()

/// Forwards every tracked body anchor to PoseKit and prints its textual
/// description of the current body pose.
/// - Note: braces restored — the scraped snippet had them stripped out.
func session(_ session: ARSession, didUpdate anchors: [ARAnchor]) {
    for anchor in anchors {
        guard let bodyAnchor = anchor as? ARBodyAnchor else { continue }

        print(poseKit.BodyTrackingPosition(bodyAnchor: bodyAnchor))
    }
}


/// Gets the **hand's** position related to the **elbow** by comparing the angle
/// between the forearm and the upper arm.
/// - Parameters:
///   - bodyAnchor: the tracked body this classification belongs to.
///   - forearmSubcase: the already-classified shoulder-to-forearm pose; the
///     "down"/transverse subcases are delegated to `ForearmToHandDownCase`.
///   - HandTransform/ForearmTransform/ShoulderTransform: joint positions used
///     to build the forearm and upper-arm vectors.
///   - leftArm: true for the left arm — mirrors the up/down classification.
/// - Returns: the matching `ForearmToHandSubcase`; `.horizontalBentIn` when no
///   angle threshold matches.
/// - Note: braces restored; the scraped snippet had them stripped. Nesting was
///   reconstructed from the original indentation — verify against upstream PoseKit.
    func ForearmToHandPos(bodyAnchor: ARBodyAnchor, forearmSubcase: ShoulderToForearmSubcase, HandTransform: simd_float4, ForearmTransform: simd_float4, ShoulderTransform: simd_float4, leftArm: Bool) -> ForearmToHandSubcase {

        let handForearmVector = bodyPart.vector(joint1: ForearmTransform, joint2: HandTransform)
        let shoulderForearmVector  = bodyPart.vector(joint1: ForearmTransform, joint2: ShoulderTransform)

        let forearmAngle = abs(bodyPart.angle(vector1: handForearmVector, vector2: shoulderForearmVector))
        // The cross product is enough to tell the bend direction.
        let crossVector = (simd_normalize(simd_cross(handForearmVector, shoulderForearmVector)))

        // Arm nearly fully extended: hand far from shoulder and a wide elbow angle.
        if simd_distance(ShoulderTransform, HandTransform) > 0.57 && forearmAngle > 125.0 {
            return .straightHorizontal
        }
        if forearmSubcase == .verticalDownDiagonalBack || forearmSubcase == .verticalDownDiagonalFront || forearmSubcase == .verticalDownParallel || forearmSubcase == .horizontalTransverse {
            return ForearmToHandDownCase(forearmAngle: forearmAngle, crossVector: crossVector, leftArm: leftArm)
        } else if leftArm {
            // Left arm: the cross product's z sign distinguishes down- vs up-bends.
            if crossVector.z < 0 {
                if forearmAngle > 105.0 { return .bentDownOut }
                else if forearmAngle > 80 { return .bentDown }
                else if forearmAngle > 55 { return .bentDownIn }
            } else {
                if forearmAngle > 105.0 { return .bentUpOut }
                else if forearmAngle > 80 { return .bentUp }
                else if forearmAngle > 55 { return .bentUpIn }
            }
        } else {
            // Right arm: mirrored — the same z sign maps to the opposite bend.
            if crossVector.z < 0 {
                if forearmAngle > 105.0 { return .bentUpOut }
                else if forearmAngle > 80 { return .bentUp }
                else if forearmAngle > 55 { return .bentUpIn }
            } else {
                if forearmAngle > 105.0 { return .bentDownOut }
                else if forearmAngle > 80 { return .bentDown }
                else if forearmAngle > 55 { return .bentDownIn }
            }
        }
        // Fallback when forearmAngle <= 55 in every branch above.
        return .horizontalBentIn
    }
【讨论】:

以上是关于如何在 ARKit 3 动作捕捉中获取所有关节 XYZ 坐标的主要内容。如果未能解决你的问题,请参考以下文章:

体姿评测系统开发

ARKit3 如何使用 TrueDepth 相机进行人脸跟踪和其他人的面部网格?

开源项目VNect使用普通手机摄像头进行动作捕捉,媲美深度摄像头

ARKit从入门到精通-ARCamera介绍

如何录制 ARKit 场景但排除 UI 元素?

光学动作捕捉系统中的反光标识点(Marker点)