ARKit 3.5 – How to export OBJ from the new iPad Pro using LiDAR?

【Title】: ARKit – How to export OBJ from iPhone/iPad with LiDAR? 【Posted】: 2020-07-18 16:10:20 【Question】:

How can I export the ARMeshGeometry generated by the new SceneReconstruction API on the latest iPad Pro to an .obj file?

Here is the SceneReconstruction documentation.
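
(For context: mesh anchors are only generated when scene reconstruction is enabled on the session configuration. Below is a minimal sketch of turning it on, assuming an ARView named arView; the helper name is illustrative and not part of the original question.)

import ARKit
import RealityKit

// Illustrative helper: enables LiDAR scene reconstruction so ARMeshAnchors are produced.
// Assumes `arView` is an ARView already added to the view hierarchy.
func runSceneReconstruction(on arView: ARView) {
    guard ARWorldTrackingConfiguration.supportsSceneReconstruction(.mesh) else {
        print("Scene reconstruction requires a LiDAR-equipped device")
        return
    }
    let configuration = ARWorldTrackingConfiguration()
    configuration.sceneReconstruction = .mesh    // or .meshWithClassification
    configuration.environmentTexturing = .automatic
    arView.session.run(configuration)
}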

【Question Comments】:

【Answer 1】:

This code snippet lets you save the LiDAR-scanned geometry as USD and send it to a Mac via AirDrop. You can export not only .usd but also .usda, .usdc, .obj, .stl, .abc and .ply file formats.

In addition, you can use SceneKit's write(to:options:delegate:progressHandler:) method to save a .usdz version of the file (a short sketch follows after the main snippet below).

import RealityKit
import ARKit
import MetalKit
import ModelIO

@IBOutlet var arView: ARView!
var saveButton: UIButton!
let rect = CGRect(x: 50, y: 50, width: 100, height: 50)

override func viewDidLoad() {
    super.viewDidLoad()

    let tui = UIControl.Event.touchUpInside
    saveButton = UIButton(frame: rect)
    saveButton.setTitle("Save", for: [])
    saveButton.addTarget(self, action: #selector(saveButtonTapped), for: tui)
    self.view.addSubview(saveButton)
}

@objc func saveButtonTapped(sender: UIButton) {
    print("Saving is executing...")

    guard let frame = arView.session.currentFrame
    else { fatalError("Can't get ARFrame") }

    guard let device = MTLCreateSystemDefaultDevice()
    else { fatalError("Can't create MTLDevice") }

    let allocator = MTKMeshBufferAllocator(device: device)
    let asset = MDLAsset(bufferAllocator: allocator)
    let meshAnchors = frame.anchors.compactMap { $0 as? ARMeshAnchor }

    for ma in meshAnchors {
        let geometry = ma.geometry
        let vertices = geometry.vertices
        let faces = geometry.faces
        let vertexPointer = vertices.buffer.contents()
        let facePointer = faces.buffer.contents()

        for vtxIndex in 0 ..< vertices.count {

            let vertex = geometry.vertex(at: UInt32(vtxIndex))
            var vertexLocalTransform = matrix_identity_float4x4

            vertexLocalTransform.columns.3 = SIMD4<Float>(x: vertex.0,
                                                          y: vertex.1,
                                                          z: vertex.2,
                                                          w: 1.0)

            let vertexWorldTransform = (ma.transform * vertexLocalTransform).position
            let vertexOffset = vertices.offset + vertices.stride * vtxIndex
            let componentStride = vertices.stride / 3

            vertexPointer.storeBytes(of: vertexWorldTransform.x,
                           toByteOffset: vertexOffset,
                                     as: Float.self)

            vertexPointer.storeBytes(of: vertexWorldTransform.y,
                           toByteOffset: vertexOffset + componentStride,
                                     as: Float.self)

            vertexPointer.storeBytes(of: vertexWorldTransform.z,
                           toByteOffset: vertexOffset + (2 * componentStride),
                                     as: Float.self)
        }

        let byteCountVertices = vertices.count * vertices.stride
        let byteCountFaces = faces.count * faces.indexCountPerPrimitive * faces.bytesPerIndex

        let vertexBuffer = allocator.newBuffer(with: Data(bytesNoCopy: vertexPointer,
                                                                count: byteCountVertices,
                                                          deallocator: .none), type: .vertex)

        let indexBuffer = allocator.newBuffer(with: Data(bytesNoCopy: facePointer,
                                                               count: byteCountFaces,
                                                         deallocator: .none), type: .index)

        let indexCount = faces.count * faces.indexCountPerPrimitive
        let material = MDLMaterial(name: "material",
                     scatteringFunction: MDLPhysicallyPlausibleScatteringFunction())

        let submesh = MDLSubmesh(indexBuffer: indexBuffer,
                                  indexCount: indexCount,
                                   indexType: .uInt32,
                                geometryType: .triangles,
                                    material: material)

        let vertexFormat = MTKModelIOVertexFormatFromMetal(vertices.format)

        let vertexDescriptor = MDLVertexDescriptor()

        vertexDescriptor.attributes[0] = MDLVertexAttribute(name: MDLVertexAttributePosition,
                                                          format: vertexFormat,
                                                          offset: 0,
                                                     bufferIndex: 0)

        vertexDescriptor.layouts[0] = MDLVertexBufferLayout(stride: ma.geometry.vertices.stride)

        let mesh = MDLMesh(vertexBuffer: vertexBuffer,
                            vertexCount: ma.geometry.vertices.count,
                             descriptor: vertexDescriptor,
                              submeshes: [submesh])

        asset.add(mesh)
    }

    let filePath = FileManager.default.urls(for: .documentDirectory,
                                             in: .userDomainMask).first!

    let usd: URL = filePath.appendingPathComponent("model.usd")

    if MDLAsset.canExportFileExtension("usd") {
        do {
            try asset.export(to: usd)

            let controller = UIActivityViewController(activityItems: [usd],
                                              applicationActivities: nil)
            controller.popoverPresentationController?.sourceView = sender
            self.present(controller, animated: true, completion: nil)

        } catch let error {
            fatalError(error.localizedDescription)
        }
    } else {
        fatalError("Can't export USD")
    }
}
After pressing the Save button, choose More in the Activity View Controller and send the prepared model to your Mac's Downloads folder via AirDrop.
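
As mentioned above, SceneKit's write(to:options:delegate:progressHandler:) can also produce a .usdz version of the exported file. Here is a minimal sketch, assuming the model.usd written by the code above already exists; the helper name is mine, not part of the original answer.

import SceneKit

// Illustrative helper: converts the exported USD file to .usdz via SceneKit.
// `usd` is assumed to be the URL written by the code above.
func exportUSDZ(from usd: URL) {
    let usdz = usd.deletingPathExtension().appendingPathExtension("usdz")

    guard let scene = try? SCNScene(url: usd, options: nil) else {
        print("Can't load scene from \(usd.lastPathComponent)")
        return
    }
    // The output format is inferred from the .usdz file extension.
    let success = scene.write(to: usdz, options: nil, delegate: nil, progressHandler: nil)
    print(success ? "Saved \(usdz.lastPathComponent)" : "USDZ export failed")
}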

P.S. Here you can find additional information on capturing real-world texture.
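
Note that geometry.vertex(at:) and the .position property on simd_float4x4 used in the snippet above are not built-in members; they come from small helper extensions (the error quoted in the comments below is what you get without them). Below is a sketch of the vertex helper, modeled on the one in Apple's scene-reconstruction sample code; an equivalent position extension appears in Answer 3.

import ARKit

extension ARMeshGeometry {
    // Reads a single (x, y, z) vertex out of the underlying Metal buffer.
    func vertex(at index: UInt32) -> (Float, Float, Float) {
        assert(vertices.format == MTLVertexFormat.float3,
               "Expected three floats (twelve bytes) per vertex.")
        let pointer = vertices.buffer.contents()
            .advanced(by: vertices.offset + (vertices.stride * Int(index)))
        return pointer.assumingMemoryBound(to: (Float, Float, Float).self).pointee
    }
}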

【Comments】:

You can, see @swiftcoder's answer. There is even sample code in the ARMeshGeometry documentation.
Having to write some code to do it doesn't mean you can't. You could say no if Apple kept this information to itself, but that is not the case here.
Could you give us a git link for this example? I get this error when trying to run the code: Value of type 'ARMeshGeometry' has no member 'vertex'
@AndyFedoroff Yes, it looks good, but I'm not happy with the precision of the mesh itself! I don't know whether it's because I'm using a 2018 iPad Pro or whether it's the same on all devices.
You're welcome! And thank you for the great articles and SO posts :)

【Answer 2】:

Starting from Apple's Visualising Scene Semantics sample app, you can retrieve the ARMeshGeometry object from the first anchor in the frame.

The easiest way to export the data is to first convert it to an MDLMesh:

extension ARMeshGeometry {
    func toMDLMesh(device: MTLDevice) -> MDLMesh {
        let allocator = MTKMeshBufferAllocator(device: device);

        let data = Data.init(bytes: vertices.buffer.contents(), count: vertices.stride * vertices.count);
        let vertexBuffer = allocator.newBuffer(with: data, type: .vertex);

        let indexData = Data.init(bytes: faces.buffer.contents(), count: faces.bytesPerIndex * faces.count * faces.indexCountPerPrimitive);
        let indexBuffer = allocator.newBuffer(with: indexData, type: .index);

        let submesh = MDLSubmesh(indexBuffer: indexBuffer,
                                 indexCount: faces.count * faces.indexCountPerPrimitive,
                                 indexType: .uInt32,
                                 geometryType: .triangles,
                                 material: nil);

        let vertexDescriptor = MDLVertexDescriptor();
        vertexDescriptor.attributes[0] = MDLVertexAttribute(name: MDLVertexAttributePosition,
                                                            format: .float3,
                                                            offset: 0,
                                                            bufferIndex: 0);
        vertexDescriptor.layouts[0] = MDLVertexBufferLayout(stride: vertices.stride);

        return MDLMesh(vertexBuffer: vertexBuffer,
                       vertexCount: vertices.count,
                       descriptor: vertexDescriptor,
                       submeshes: [submesh]);
    }
}

Once you have the MDLMesh, exporting it to an .obj file is a breeze:

    @IBAction func exportMesh(_ button: UIButton) {
        let meshAnchors = arView.session.currentFrame?.anchors.compactMap({ $0 as? ARMeshAnchor });

        DispatchQueue.global().async {

            let directory = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0];
            let filename = directory.appendingPathComponent("MyFirstMesh.obj");

            guard let device = MTLCreateSystemDefaultDevice() else {
                print("metal device could not be created");
                return;
            };

            let asset = MDLAsset();

            for anchor in meshAnchors! {
                let mdlMesh = anchor.geometry.toMDLMesh(device: device);
                asset.add(mdlMesh);
            }

            do {
                try asset.export(to: filename);
            } catch {
                print("failed to write to file");
            }
        }
    }

【Comments】:

Hi @swiftcoder! Thanks for your answer. It looks convincing. Have you tested it? Does the OBJ export work? I can't test it because I don't have an iPad with a LiDAR scanner.
Yes, I used this code (added to the sample app) to scan objects in my apartment. Note that if you scan a larger area you will end up with multiple mesh anchors, so you need to run this code for each anchor and add them all to the MDLAsset.
Thanks @swiftcoder! Where do I place the code let mdlMesh = anchor.geometry.toMDLMesh()... in the sample? Did you use an extra IBAction for it?
Yes, I added a new IBAction (I've updated the answer to include it) and wired it to an "Export" button in the UI.
Where exactly is the .obj saved? Or how can I access it?

【Answer 3】:

The answer by @swiftcoder works great. But in the case of multiple anchors you need to convert the vertex coordinates to the world coordinate system using each anchor's transform. Otherwise all the meshes end up placed at the origin and you get a mess.

The updated code looks like this:

extension ARMeshGeometry {
    func toMDLMesh(device: MTLDevice, transform: simd_float4x4) -> MDLMesh {
        let allocator = MTKMeshBufferAllocator(device: device)

        let data = Data.init(bytes: transformedVertexBuffer(transform), count: vertices.stride * vertices.count)
        let vertexBuffer = allocator.newBuffer(with: data, type: .vertex)

        let indexData = Data.init(bytes: faces.buffer.contents(), count: faces.bytesPerIndex * faces.count * faces.indexCountPerPrimitive)
        let indexBuffer = allocator.newBuffer(with: indexData, type: .index)

        let submesh = MDLSubmesh(indexBuffer: indexBuffer,
                                 indexCount: faces.count * faces.indexCountPerPrimitive,
                                 indexType: .uInt32,
                                 geometryType: .triangles,
                                 material: nil)

        let vertexDescriptor = MDLVertexDescriptor()
        vertexDescriptor.attributes[0] = MDLVertexAttribute(name: MDLVertexAttributePosition,
                                                            format: .float3,
                                                            offset: 0,
                                                            bufferIndex: 0)
        vertexDescriptor.layouts[0] = MDLVertexBufferLayout(stride: vertices.stride)

        return MDLMesh(vertexBuffer: vertexBuffer,
                       vertexCount: vertices.count,
                       descriptor: vertexDescriptor,
                       submeshes: [submesh])
    }

    func transformedVertexBuffer(_ transform: simd_float4x4) -> [Float] {
        var result = [Float]()
        for index in 0..<vertices.count {
            let vertexPointer = vertices.buffer.contents().advanced(by: vertices.offset + vertices.stride * index)
            let vertex = vertexPointer.assumingMemoryBound(to: (Float, Float, Float).self).pointee
            var vertexTransform = matrix_identity_float4x4
            vertexTransform.columns.3 = SIMD4<Float>(vertex.0, vertex.1, vertex.2, 1)
            let position = (transform * vertexTransform).position
            result.append(position.x)
            result.append(position.y)
            result.append(position.z)
        }
        return result
    }
}

extension simd_float4x4 {
    var position: SIMD3<Float> {
        return SIMD3<Float>(columns.3.x, columns.3.y, columns.3.z)
    }
}

extension Array where Element == ARMeshAnchor {
    func save(to fileURL: URL, device: MTLDevice) throws {
        let asset = MDLAsset()
        self.forEach {
            let mesh = $0.geometry.toMDLMesh(device: device, transform: $0.transform)
            asset.add(mesh)
        }
        try asset.export(to: fileURL)
    }
}
I'm not a ModelIO expert, so maybe there is a simpler way to transform the vertex buffer :) but this code works for me.
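
For completeness, here is a minimal usage sketch of the save(to:device:) extension above (the surrounding function and the file name are illustrative; arView is assumed to be your ARView):

import ARKit
import RealityKit
import Metal

func exportScene(from arView: ARView) {
    guard let frame = arView.session.currentFrame,
          let device = MTLCreateSystemDefaultDevice() else { return }

    // Collect every mesh anchor in the current frame and write one .obj file.
    let meshAnchors = frame.anchors.compactMap { $0 as? ARMeshAnchor }
    let url = FileManager.default.urls(for: .documentDirectory,
                                       in: .userDomainMask)[0]
        .appendingPathComponent("scene.obj")
    do {
        try meshAnchors.save(to: url, device: device)
        print("Exported to \(url.path)")
    } catch {
        print("Export failed: \(error)")
    }
}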

【Comments】:

Looks great! Could you give us a complete ViewController.swift example or upload your project to GitHub?
Okay Florian, here you go: github.com/alexander-gaidukov/LiDarDetector
This is great. Is there a way to also save the model's texture?
Unfortunately there is no vertex color or texture support.
I was able to add texture coordinates and export the mesh. Adding the method here: ***.com/a/61790146/1072825
