ARKit
2019-11-15 纳兰沫
If AR is only a secondary feature of your app, always check support at runtime with the isSupported property of the configuration you plan to use, e.g. ARWorldTrackingConfiguration.isSupported, to determine whether the user's iOS device supports AR before exposing the feature.
If AR is the app's primary feature, declare it in Info.plist instead (add arkit to UIRequiredDeviceCapabilities), as shown in Figure 1, so the app is only offered to devices that support ARKit.
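A minimal sketch of the runtime check; arButton is a hypothetical outlet for whatever control launches the AR screen:
if ARWorldTrackingConfiguration.isSupported {
    // The device supports world tracking, so expose the AR feature
    arButton.isHidden = false
} else {
    // Hide the AR entry point on unsupported devices
    arButton.isHidden = true
}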
Creating static AR
1. Create a new file with the .scn extension, delete its default contents, and drag a shape into it.
2. Download a texture from https://www.solarsystemscope.com/textures/.
3. In the .scn file, adjust the shape's position, rotation angle, and scale, and apply the texture as its material.
4. In the view controller, change the name in let scene = SCNScene(named: "art.scnassets/sphere.scn")! to match your file; see the sketch after this list.
That is enough to display a static AR scene.
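A minimal sketch of step 4, following the ARKit Xcode template (the sceneView outlet and the sphere.scn file name are assumptions taken from the snippet above):
override func viewDidLoad() {
    super.viewDidLoad()
    sceneView.delegate = self
    // Load the edited .scn file; its position, rotation, scale, and texture were already set in the Scene Editor
    let scene = SCNScene(named: "art.scnassets/sphere.scn")!
    sceneView.scene = scene
}
The session still has to be started in viewWillAppear with an ARWorldTrackingConfiguration, as shown in the plane detection section below.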
Creating dynamic AR
Creating a 3D sphere
// Geometry: a 3D sphere
let sphere = SCNSphere(radius: 0.2)
// Texture (material)
sphere.firstMaterial?.diffuse.contents = UIImage(named: "art.scnassets/8k_earth_daymap.jpg")
// Node: create a node to render this 3D model
let node = SCNNode(geometry: sphere)
// Set the node's 3D position in the real-world coordinate space
node.position = SCNVector3(0, 0, -0.5)
// rootNode plays the same role as a .scn file in scnassets; addChildNode attaches the child node so it is rendered and displayed
sceneView.scene.rootNode.addChildNode(node)
Converting an obj file to a usdz file
1. Open Terminal.
2. Run xcrun usdz_converter followed by the path of the .obj file and the output path (give the output a .usdz extension); see the example after this list.
3. Then, as shown in Figure 2, convert the usdz to a .scn file inside Xcode.
Figure 2: converting the model to a .scn file in Xcode
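For example (the paths below are hypothetical):
xcrun usdz_converter ~/Models/teapot.obj ~/Models/teapot.usdz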
Plane detection
override func viewDidLoad() {
    super.viewDidLoad()
    // Set the view's delegate
    sceneView.delegate = self
    // Show statistics such as fps and timing information - useful for debugging
    sceneView.showsStatistics = true
    // Automatically light the model
    sceneView.automaticallyUpdatesLighting = true
    // Visualize the feature points ARKit detects - also useful for debugging
    sceneView.debugOptions = .showFeaturePoints
}
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Create a session configuration
    // ARWorldTrackingConfiguration blends virtual content with the real environment
    let configuration = ARWorldTrackingConfiguration()
    // Detect horizontal planes
    configuration.planeDetection = .horizontal
    // Run the view's session
    sceneView.session.run(configuration)
}
// MARK: - ARSCNViewDelegate
// An ARAnchor holds a real-world position, orientation, and size; 3D models can be attached to it so they blend seamlessly with the real world
// Once planeDetection is set on the configuration, the session adds anchors automatically (the system detects planes for us and stores each plane's size, position, and orientation in an ARPlaneAnchor)
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    // Filter for plane anchors - an anchor is virtual; to visualize it we have to attach a node
    if let planeAnchor = anchor as? ARPlaneAnchor {
        // Create the geometry (a plane) and give it a semi-transparent material
        let plane = SCNPlane(width: CGFloat(planeAnchor.extent.x), height: CGFloat(planeAnchor.extent.z))
        guard let material = plane.firstMaterial else { return }
        material.diffuse.contents = UIColor(white: 1, alpha: 0.5)
        // Create a node from the geometry and position it
        let planeNode = SCNNode(geometry: plane)
        planeNode.simdPosition = planeAnchor.center // 3D position of the plane's center, relative to the anchor's node
        // SCNPlane is vertical by default; rotate it to lie flat
        planeNode.eulerAngles.x = -.pi / 2
        // Attach it to the empty node the session created for this anchor so it gets displayed
        node.addChildNode(planeNode)
    }
}
Placing a model where the user taps on a plane
// Only place the teapot when the user taps a detected horizontal plane in the real environment
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    // Get the 2D coordinates of the tap in the view
    guard let location = touches.first?.location(in: sceneView) else { return }
    // Convert them to a 3D position by hit-testing against detected planes
    guard let result = sceneView.hitTest(location, types: .existingPlaneUsingExtent).first else { return }
    let position = result.worldTransform.columns.3
    // Place a teapot 3D model at that position
    guard let scene = SCNScene(named: "art.scnassets/teapot.scn") else { return }
    guard let teapotNode = scene.rootNode.childNode(withName: "Teapot", recursively: true) else { return }
    teapotNode.position = SCNVector3(position.x, position.y, position.z)
    // Be sure to add the node to sceneView's rootNode
    sceneView.scene.rootNode.addChildNode(teapotNode)
}
Distance measurement
var nodes: [SCNNode] = []

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
    // Once two markers have been placed, clear them and start a new measurement
    if nodes.count >= 2 {
        for node in nodes {
            node.removeFromParentNode()
        }
        nodes = []
    }
    // Hit-test the tap against feature points to get a 3D position
    guard let location = touches.first?.location(in: sceneView) else { return }
    guard let result = sceneView.hitTest(location, types: .featurePoint).first else { return }
    let position = result.worldTransform.columns.3
    // Drop a small yellow sphere as a marker
    let sphere = SCNSphere(radius: 0.005)
    sphere.firstMaterial?.diffuse.contents = UIColor.yellow
    let node = SCNNode(geometry: sphere)
    node.position = SCNVector3(x: position.x, y: position.y, z: position.z)
    sceneView.scene.rootNode.addChildNode(node)
    nodes.append(node)
    // With two markers placed, compute the Euclidean distance between them (in meters)
    if nodes.count >= 2 {
        let p1 = nodes[0].position
        let p2 = nodes[1].position
        let distance = sqrt(pow(p1.x - p2.x, 2) + pow(p1.y - p2.y, 2) + pow(p1.z - p2.z, 2))
        print(distance)
    }
}
Image recognition
override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    // Create a session configuration
    let configuration = ARWorldTrackingConfiguration()
    // "Pake Cards" is the name of an AR Resource Group in the asset catalog that contains the reference images
    configuration.detectionImages = ARReferenceImage.referenceImages(inGroupNamed: "Pake Cards", bundle: nil)
    configuration.maximumNumberOfTrackedImages = 1
    // Run the view's session
    sceneView.session.run(configuration)
}
// MARK: - ARSCNViewDelegate
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    DispatchQueue.main.async {
        guard let imageARAnchor = anchor as? ARImageAnchor else { return }
        // A translucent plane matching the physical size of the detected image
        let planeNode = SCNNode(geometry: SCNPlane(width: imageARAnchor.referenceImage.physicalSize.width, height: imageARAnchor.referenceImage.physicalSize.height))
        planeNode.opacity = 0.25
        // SCNPlane is vertical by default; rotate it to lie flat on the detected image
        planeNode.eulerAngles.x = -.pi / 2
        guard let eeveeNode = SCNScene(named: "art.scnassets/eevee.scn")?.rootNode.childNode(withName: "eevee", recursively: true) else { return }
        node.addChildNode(planeNode)
        node.addChildNode(eeveeNode)
    }
}