I'm Japanese and a beginner; this is my first post to Stack Overflow.
I'm trying to make a location-based AR app in which specific objects appear at specific positions, using ARKit and CoreLocation.
For example, sphere3Node should appear when the latitude is greater than 35.5 (see my source code).
The code builds and runs without errors, but sphere3Node never appears on screen.
import SceneKit
import ARKit
import CoreLocation
import UIKit
class ViewController: UIViewController, ARSCNViewDelegate, CLLocationManagerDelegate{
var locationManager = CLLocationManager()
var locationData : CLLocation? = nil
@IBOutlet weak var idoLabel: UILabel!
@IBOutlet weak var keidoLabel: UILabel!
@IBOutlet weak var hyoukouLabel: UILabel!
@IBOutlet weak var hanteiLabel: UILabel!
private let label: UILabel = UILabel()
@IBOutlet var sceneView: ARSCNView!
override func viewDidLoad() {
super.viewDidLoad()
locationManager = CLLocationManager()
locationManager.requestWhenInUseAuthorization()
locationManager.delegate = self
setupLocationService()
self.sceneView = ARSCNView(frame: self.view.frame)
self.label.frame = CGRect(x:0, y:0, width: self.sceneView.frame.size.width, height:50 )
self.label.center = self.sceneView.center
self.label.textAlignment = .center
self.label.textColor = UIColor.red
self.label.font = UIFont.preferredFont(forTextStyle: .headline)
self.label.alpha = 0
self.sceneView.addSubview(self.label)
self.sceneView.debugOptions = [ARSCNDebugOptions.showFeaturePoints,ARSCNDebugOptions.showWorldOrigin]
self.view.addSubview(self.sceneView)
// Set the view's delegate
sceneView.delegate = self
// Show statistics such as fps and timing information
sceneView.showsStatistics = true
// Create a new scene
let scene = SCNScene()
// box blue
//Color Setting , random
let random: CGFloat = CGFloat(arc4random() % 10)
let r: CGFloat = CGFloat(arc4random_uniform(255)+1) / 255.0
let g: CGFloat = CGFloat(arc4random_uniform(255)+1) / 255.0
let b: CGFloat = CGFloat(arc4random_uniform(255)+1) / 255.0
//let color: UIColor = UIColor(red: r, green: g, blue: b, alpha: 1.0)
// node (Box)
let box = SCNBox(width: 0.2, height: 0.2, length: 0.2, chamferRadius: 0)
let material = SCNMaterial()
material.diffuse.contents = UIColor(red:r, green:g, blue:b, alpha: 1)
let node = SCNNode()
node.geometry = box
material.name = "Color"
node.geometry?.materials = [material]
node.position = SCNVector3(0, 0.2, -1)
//Object Setting
// sphereNode green
let sphere = SCNSphere(radius: 0.2)
let sphereMaterial = SCNMaterial()
// sphereMaterial.diffuse.contents = UIColor.green
sphereMaterial.diffuse.contents = UIImage(named: "earthmap.jpeg")
let sphereNode = SCNNode()
sphereNode.geometry = sphere
sphereNode.geometry?.materials = [sphereMaterial]
sphereNode.position = SCNVector3(0.5, 0.1, -1)
// sphere2Node red
let sphere2 = SCNSphere(radius: 0.1)
let sphere2Material = SCNMaterial()
sphere2Material.diffuse.contents = UIColor.red
let sphere2Node = SCNNode()
sphere2Node.geometry = sphere2
sphere2Node.geometry?.materials = [sphere2Material]
sphere2Node.position = SCNVector3(0.4, 0.1, -3)
// sphere3Node blue
let sphere3 = SCNSphere(radius: 0.1)
let sphere3Material = SCNMaterial()
sphere3Material.diffuse.contents = UIColor.blue
let sphere3Node = SCNNode()
sphere3Node.geometry = sphere3
sphere3Node.geometry?.materials = [sphere3Material]
sphere3Node.position = SCNVector3(0.6, 0.2, -3)
scene.rootNode.addChildNode(node)
scene.rootNode.addChildNode(sphereNode)
scene.rootNode.addChildNode(sphere2Node)
if let locationData = locationData, locationData.coordinate.latitude > 35.5 {
scene.rootNode.addChildNode(sphere3Node)
}
let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(tapped))
self.sceneView.addGestureRecognizer(tapRecognizer)
// Set the scene to the view
sceneView.scene = scene
}
// tap handler (exposed to Objective-C for the gesture recognizer)
@objc func tapped(recognizer: UIGestureRecognizer) {
let sceneView = recognizer.view as! SCNView
let touchLocation = recognizer.location(in: sceneView)
let hitResults = sceneView.hitTest(touchLocation, options: [:])
if !hitResults.isEmpty {
let node = hitResults[0].node
let material = node.geometry?.material(named: "Color")
material?.diffuse.contents = UIImage(named: "earthmap.jpeg")
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = .horizontal
// Run the view's session
sceneView.session.run(configuration)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
sceneView.session.pause()
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Release any cached data, images, etc that aren't in use.
}
// MARK: - ARSCNViewDelegate
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
DispatchQueue.main.async{
self.label.text = "plane detected"
UIView.animate(withDuration: 3.0, animations: {
self.label.alpha = 1.0
}){(completion: Bool) in
self.label.alpha = 0.0
}
}
}
/*
// Override to create and configure nodes for anchors added to the view's session.
func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
let node = SCNNode()
return node
}
*/
func session(_ session: ARSession, didFailWithError error: Error) {
// Present an error message to the user
}
func sessionWasInterrupted(_ session: ARSession) {
// Inform the user that the session has been interrupted, for example, by presenting an overlay
}
func sessionInterruptionEnded(_ session: ARSession) {
// Reset tracking and/or remove existing anchors if consistent tracking is required
}
//location
func setupLocationService(){
locationManager.desiredAccuracy = kCLLocationAccuracyBest
locationManager.distanceFilter = 1
}
func locationManager(_ manager: CLLocationManager,
didChangeAuthorization status: CLAuthorizationStatus) {
switch status{
case .authorizedAlways, .authorizedWhenInUse :
locationManager.startUpdatingLocation()
case .notDetermined:
locationManager.stopUpdatingLocation()
//disabledLocationLabel()
default:
locationManager.stopUpdatingLocation()
//disabledLocationLabel()
}
}
private func locationManager(_ manager: CLLocationManager,
didUpdateLocations locations: [CLLocation]){
let locationData = locations.last
if var ido = locationData?.coordinate.latitude {
ido = round(ido*1000000)/1000000
}
if var keido = locationData?.coordinate.longitude{
keido = round(keido*1000000)/1000000
}
if var hyoukou = locationData?.altitude{
hyoukou = round(hyoukou*100)/100
}
}
}
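For clarity, this is roughly the behaviour I am trying to get. It is only a sketch of my intent, not my actual code, and it assumes sphere3Node were stored as a property on the view controller and that the latitude check ran whenever a location update arrives:

// Sketch only: assumes `sphere3Node` is a property instead of a local in viewDidLoad.
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
    guard let latest = locations.last else { return }
    locationData = latest
    // Add the blue sphere once we know we are north of latitude 35.5.
    if latest.coordinate.latitude > 35.5, sphere3Node.parent == nil {
        sceneView.scene.rootNode.addChildNode(sphere3Node)
    }
}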
That's all. Thank you.
Related
I am trying to build an app to draw graffiti in ARKit using bare hands.
The graffiti should look realistic. I have gone through many examples, and most of them use SCNSphere. That does the job, but there are gaps between the individual spheres, which does not look realistic.
How do we achieve a brush/drawn-line effect?
Is SceneKit the best way to do this, or should we try SpriteKit/Unity?
My basic code looks like this:
import UIKit
import ARKit
import SceneKit
class ViewController : UIViewController, ARSCNViewDelegate, ARSessionDelegate {
@IBOutlet var sceneView: ARSCNView!
override func viewDidLoad() {
super.viewDidLoad()
sceneView.delegate = self as ARSCNViewDelegate
sceneView.showsStatistics = true
sceneView.autoenablesDefaultLighting = true
sceneView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(panGesture(_:))))
}
override func loadView() {
sceneView = ARSCNView(frame:CGRect(x: 0.0, y: 0.0, width: UIScreen.main.bounds.height, height: UIScreen.main.bounds.width))
sceneView.delegate = self
let config = ARWorldTrackingConfiguration()
config.planeDetection = [.horizontal, .vertical]
sceneView.session.delegate = self
self.view = sceneView
sceneView.session.run(config)
}
@objc func panGesture(_ gesture: UIPanGestureRecognizer) {
gesture.minimumNumberOfTouches = 1
guard let query = sceneView.raycastQuery(from: gesture.location(in: gesture.view), allowing: .existingPlaneInfinite, alignment: .any) else {
return
}
let results = sceneView.session.raycast(query)
guard let hitTestResult = results.first else {
return
}
let position = SCNVector3Make(hitTestResult.worldTransform.columns.3.x, hitTestResult.worldTransform.columns.3.y, hitTestResult.worldTransform.columns.3.z)
let sphere = SCNSphere(radius: 0.5)
let material = SCNMaterial()
material.diffuse.contents = UIColor.blue
sphere.materials = [material]
let sphereNode = SCNNode()
sphereNode.scale = SCNVector3(x:0.004,y:0.004,z:0.004)
sphereNode.geometry = sphere
sphereNode.position = SCNVector3(x: 0, y:0.02, z: -1)
self.sceneView.scene.rootNode.addChildNode(sphereNode)
sphereNode.position = position
}
}
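For illustration, a rough sketch of connecting consecutive hit points with thin cylinders instead of dropping separate spheres. The previousPosition property, the cylinder sizing, and the helper below are my own assumptions, not tested code; the usage part would replace the sphere-adding lines inside panGesture(_:):

// Sketch: assumes a `var previousPosition: SCNVector3?` property on the view controller.
func lineNode(from a: SCNVector3, to b: SCNVector3, radius: CGFloat = 0.002) -> SCNNode {
    let dx = b.x - a.x, dy = b.y - a.y, dz = b.z - a.z
    let distance = sqrt(dx * dx + dy * dy + dz * dz)
    let cylinder = SCNCylinder(radius: radius, height: CGFloat(distance))
    cylinder.firstMaterial?.diffuse.contents = UIColor.blue
    let node = SCNNode(geometry: cylinder)
    // centre the cylinder halfway between the two points
    node.position = SCNVector3((a.x + b.x) / 2, (a.y + b.y) / 2, (a.z + b.z) / 2)
    // a cylinder's axis is its local Y; point local Y from a towards b
    node.look(at: b, up: SCNVector3(0, 1, 0), localFront: SCNVector3(0, 1, 0))
    return node
}

// In panGesture(_:), instead of adding a new sphere each time:
// if let previous = previousPosition {
//     sceneView.scene.rootNode.addChildNode(lineNode(from: previous, to: position))
// }
// previousPosition = (gesture.state == .ended) ? nil : position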
I use this simple code for playing a 360° video. I need to add a point (marker) to the video; adding it is no problem, but how do I track taps on it? In this example the point is added in the viewDidLoad method.
I tried touchesBegan, but that method does not work. I really hope for your help.
class ViewControllerTwo: UIViewController {
let motionManager = CMMotionManager()
let cameraNode = SCNNode()
var sphereNode: SCNNode!
@IBOutlet weak var sceneView: SCNView!
func createSphereNode(material: AnyObject?) -> SCNNode {
let sphere = SCNSphere(radius: 100.0)
sphere.segmentCount = 96
sphere.firstMaterial!.isDoubleSided = true
sphere.firstMaterial!.diffuse.contents = material
let sphereNode = SCNNode(geometry: sphere)
sphereNode.position = SCNVector3Make(0,0,0)
return sphereNode
}
func configureScene(node sphereNode: SCNNode) {
let scene = SCNScene()
sceneView.scene = scene
sceneView.showsStatistics = true
sceneView.allowsCameraControl = true
cameraNode.camera = SCNCamera()
cameraNode.position = SCNVector3Make(0, 0, 0)
scene.rootNode.addChildNode(sphereNode)
scene.rootNode.addChildNode(cameraNode)
}
func startCameraTracking() {
motionManager.deviceMotionUpdateInterval = 1.0 / 60.0
motionManager.startDeviceMotionUpdates(to: .main) { [weak self] (data, error) in
guard let data = data else { return }
let attitude: CMAttitude = data.attitude
self?.cameraNode.eulerAngles = SCNVector3Make(Float(attitude.roll + Double.pi/2.0), -Float(attitude.yaw), -Float(attitude.pitch))
}
}
override func viewDidLoad() {
super.viewDidLoad()
let url = URL(fileURLWithPath: Bundle.main.path(forResource: "google-help-vr", ofType: "mp4")!)
let player = AVPlayer(url: url )
let videoNode = SKVideoNode(avPlayer: player)
let size = CGSize(width: 1025, height: 512)
videoNode.size = size
videoNode.position = CGPoint(x: size.width / 2, y: size.height / 2)
let spriteScene = SKScene(size: size)
spriteScene.addChild(videoNode)
// How to detect when tapped?
let circ = SKShapeNode(rectOf: CGSize(width: 50, height: 50), cornerRadius: 25)
circ.fillColor = .red
circ.isUserInteractionEnabled = true
videoNode.addChild(circ)
sphereNode = createSphereNode(material:spriteScene)
configureScene(node: sphereNode)
guard motionManager.isDeviceMotionAvailable else {
return
}
startCameraTracking()
player.play()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
sceneView.play(self)
}
}
I also made a subclass of SKShapeNode in order to track taps through the touchesBegan method, but without success.
You can use a UITapGestureRecognizer to get the 2D point, then use SCNSceneRenderer's hitTest(_:options:) to get all of the possible intersections along that ray. Note that the method is on the SCNSceneRenderer protocol, which SCNView conforms to, so you may have missed it in the SCNView documentation.
@IBAction func tap(_ recognizer: UITapGestureRecognizer) {
let location = recognizer.location(in: sceneView)
if let firstResult = sceneView.hitTest(location, options: nil).first {
// Do stuff with firstResult here
}
}
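If the goal is specifically to know whether the red SKShapeNode inside the sprite scene was hit, one possible follow-up is to map the hit's texture coordinates back into the SKScene and hit-test there. This is only a sketch, not part of the answer above; spriteScene and circ are the names from the question, and the vertical flip may need adjusting:

// Sketch: map the SceneKit hit back into the SKScene that textures the sphere.
let uv = firstResult.textureCoordinates(withMappingChannel: 0)
let scenePoint = CGPoint(x: uv.x * spriteScene.size.width,
                         y: (1 - uv.y) * spriteScene.size.height)
if spriteScene.nodes(at: scenePoint).contains(circ) {
    print("red marker tapped")
}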
I'm working on a project in which we have to detect a certain number of custom QR codes (as ARImageAnchors) and then use the positions of these anchors to dynamically display a 3D overlay. To be exact, we are planning to dynamically display a 3D model of human anatomy over the anchors, which will be placed on a mannequin. For example, if the mannequin we are placing the QR codes on is smaller or bigger than the default size of the 3D model, we would like the model to adapt based on the distances between the images. Below is the sample code I'm thinking of working from (source: https://www.appcoda.com/arkit-image-recognition/).
import UIKit
import ARKit
class ViewController: UIViewController {
@IBOutlet weak var sceneView: ARSCNView!
@IBOutlet weak var label: UILabel!
let fadeDuration: TimeInterval = 0.3
let rotateDuration: TimeInterval = 3
let waitDuration: TimeInterval = 0.5
lazy var fadeAndSpinAction: SCNAction = {
return .sequence([
.fadeIn(duration: fadeDuration),
.rotateBy(x: 0, y: 0, z: CGFloat.pi * 360 / 180, duration: rotateDuration),
.wait(duration: waitDuration),
.fadeOut(duration: fadeDuration)
])
}()
lazy var fadeAction: SCNAction = {
return .sequence([
.fadeOpacity(by: 0.8, duration: fadeDuration),
.wait(duration: waitDuration),
.fadeOut(duration: fadeDuration)
])
}()
lazy var treeNode: SCNNode = {
guard let scene = SCNScene(named: "tree.scn"),
let node = scene.rootNode.childNode(withName: "tree", recursively: false) else { return SCNNode() }
let scaleFactor = 0.005
node.scale = SCNVector3(scaleFactor, scaleFactor, scaleFactor)
node.eulerAngles.x = -.pi / 2
return node
}()
lazy var bookNode: SCNNode = {
guard let scene = SCNScene(named: "book.scn"),
let node = scene.rootNode.childNode(withName: "book", recursively: false) else { return SCNNode() }
let scaleFactor = 0.1
node.scale = SCNVector3(scaleFactor, scaleFactor, scaleFactor)
return node
}()
lazy var mountainNode: SCNNode = {
guard let scene = SCNScene(named: "mountain.scn"),
let node = scene.rootNode.childNode(withName: "mountain", recursively: false) else { return SCNNode() }
let scaleFactor = 0.25
node.scale = SCNVector3(scaleFactor, scaleFactor, scaleFactor)
node.eulerAngles.x += -.pi / 2
return node
}()
override func viewDidLoad() {
super.viewDidLoad()
sceneView.delegate = self
configureLighting()
}
func configureLighting() {
sceneView.autoenablesDefaultLighting = true
sceneView.automaticallyUpdatesLighting = true
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
resetTrackingConfiguration()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
sceneView.session.pause()
}
@IBAction func resetButtonDidTouch(_ sender: UIBarButtonItem) {
resetTrackingConfiguration()
}
func resetTrackingConfiguration() {
guard let referenceImages = ARReferenceImage.referenceImages(inGroupNamed: "AR Resources", bundle: nil) else { return }
let configuration = ARWorldTrackingConfiguration()
configuration.detectionImages = referenceImages
let options: ARSession.RunOptions = [.resetTracking, .removeExistingAnchors]
sceneView.session.run(configuration, options: options)
label.text = "Move camera around to detect images"
}
}
extension ViewController: ARSCNViewDelegate {
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
DispatchQueue.main.async {
guard let imageAnchor = anchor as? ARImageAnchor,
let imageName = imageAnchor.referenceImage.name else { return }
// TODO: Comment out code
// let planeNode = self.getPlaneNode(withReferenceImage: imageAnchor.referenceImage)
// planeNode.opacity = 0.0
// planeNode.eulerAngles.x = -.pi / 2
// planeNode.runAction(self.fadeAction)
// node.addChildNode(planeNode)
// TODO: Overlay 3D Object
let overlayNode = self.getNode(withImageName: imageName)
overlayNode.opacity = 0
overlayNode.position.y = 0.2
overlayNode.runAction(self.fadeAndSpinAction)
node.addChildNode(overlayNode)
self.label.text = "Image detected: \"\(imageName)\""
}
}
func getPlaneNode(withReferenceImage image: ARReferenceImage) -> SCNNode {
let plane = SCNPlane(width: image.physicalSize.width,
height: image.physicalSize.height)
let node = SCNNode(geometry: plane)
return node
}
func getNode(withImageName name: String) -> SCNNode {
var node = SCNNode()
switch name {
case "Book":
node = bookNode
case "Snow Mountain":
node = mountainNode
case "Trees In the Dark":
node = treeNode
default:
break
}
return node
}
}
I know that the 3D overlay is displayed in the renderer function above, but it is only displayed on top of a single detected image anchor. My question is: is it possible to reference multiple ARImageAnchors to dynamically display a single 3D model?
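To make the idea concrete, this is roughly the kind of logic I imagine for extending the renderer(_:didAdd:for:) callback. It is just a sketch; the marker image names, the two-marker assumption, the scaling rule, and the anatomyNode are all my own assumptions rather than working code:

// Sketch: remember where each detected marker is and, once two known markers
// have been seen, scale a single anatomy model based on their distance.
var markerPositions: [String: simd_float3] = [:]   // image name -> world position

func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    guard let imageAnchor = anchor as? ARImageAnchor,
          let name = imageAnchor.referenceImage.name else { return }
    markerPositions[name] = simd_make_float3(imageAnchor.transform.columns.3)

    if let head = markerPositions["HeadMarker"],      // hypothetical image names
       let feet = markerPositions["FeetMarker"] {
        let measured = simd_distance(head, feet)      // metres between the two QR codes
        let defaultHeight: Float = 1.7                // assumed default model height
        let scale = measured / defaultHeight
        anatomyNode.simdScale = simd_float3(repeating: scale)   // hypothetical model node
        anatomyNode.simdPosition = (head + feet) / 2
    }
}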
Being a novice in ARKit and Swift in general, I'm not sure how to go about this problem yet. I'm hoping someone might have an idea of how to work around this and point me in the right direction. Any help will be greatly appreciated!
Thanks in advance!
I'm trying to build a furniture-placing AR app using ARKit.
I have a .scn chair and its PNG textures in my project. The app is supposed to detect a horizontal plane; when the user taps, the object should be placed at the tapped position.
But the object is not placed when I tap.
ViewController:
import UIKit
import SceneKit
import ARKit
class ViewController: UIViewController {
@IBOutlet weak var sceneView: ARSCNView!
override func viewDidLoad() {
super.viewDidLoad()
addTapGestureToSceneView()
configureLighting()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
setUpSceneView()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
sceneView.session.pause()
}
func setUpSceneView() {
let configuration = ARWorldTrackingConfiguration()
configuration.planeDetection = .horizontal
sceneView.session.run(configuration)
sceneView.delegate = self
sceneView.debugOptions = [ARSCNDebugOptions.showFeaturePoints]
}
func configureLighting() {
sceneView.autoenablesDefaultLighting = true
sceneView.automaticallyUpdatesLighting = true
}
@objc func addShipToSceneView(withGestureRecognizer recognizer: UIGestureRecognizer) {
let tapLocation = recognizer.location(in: sceneView)
let hitTestResults = sceneView.hitTest(tapLocation, types: .existingPlaneUsingExtent)
guard let hitTestResult = hitTestResults.first else { return }
let translation = hitTestResult.worldTransform.translation
let x = translation.x
let y = translation.y
let z = translation.z
guard let scene = SCNScene(named: "art.scnassets/chair.scn"),
let shipNode = scene.rootNode.childNode(withName: "chair_DIFFUSE", recursively: false)
else {
print("Failed to render")
return
}
shipNode.position = SCNVector3(x,y,z)
sceneView.scene.rootNode.addChildNode(shipNode)
}
func addTapGestureToSceneView() {
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(ViewController.addShipToSceneView(withGestureRecognizer:)))
sceneView.addGestureRecognizer(tapGestureRecognizer)
}
}
extension float4x4 {
var translation: float3 {
let translation = self.columns.3
return float3(translation.x, translation.y, translation.z)
}
}
extension UIColor {
open class var transparentLightBlue: UIColor {
return UIColor(red: 90/255, green: 200/255, blue: 250/255, alpha: 0.50)
}
}
extension ViewController: ARSCNViewDelegate {
func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
// 1
guard let planeAnchor = anchor as? ARPlaneAnchor else { return }
// 2
let width = CGFloat(planeAnchor.extent.x)
let height = CGFloat(planeAnchor.extent.z)
let plane = SCNPlane(width: width, height: height)
// 3
plane.materials.first?.diffuse.contents = UIColor.transparentLightBlue
// 4
let planeNode = SCNNode(geometry: plane)
// 5
let x = CGFloat(planeAnchor.center.x)
let y = CGFloat(planeAnchor.center.y)
let z = CGFloat(planeAnchor.center.z)
planeNode.position = SCNVector3(x,y,z)
planeNode.eulerAngles.x = -.pi / 2
// 6
node.addChildNode(planeNode)
}
func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
// 1
guard let planeAnchor = anchor as? ARPlaneAnchor,
let planeNode = node.childNodes.first,
let plane = planeNode.geometry as? SCNPlane
else { return }
// 2
let width = CGFloat(planeAnchor.extent.x)
let height = CGFloat(planeAnchor.extent.z)
plane.width = width
plane.height = height
// 3
let x = CGFloat(planeAnchor.center.x)
let y = CGFloat(planeAnchor.center.y)
let z = CGFloat(planeAnchor.center.z)
planeNode.position = SCNVector3(x, y, z)
}
}
So I get "Failed to render" printed when I tap to place the object, and nothing else is printed in the console!
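The "Failed to render" print means the guard failed, i.e. no child node named "chair_DIFFUSE" was found at the top level of the scene. One quick way to see which node names the file actually contains (just a diagnostic sketch):

// Sketch: dump the node names inside chair.scn to find the one to load.
if let scene = SCNScene(named: "art.scnassets/chair.scn") {
    scene.rootNode.enumerateHierarchy { node, _ in
        print(node.name ?? "<unnamed>")
    }
}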
After a little code tweaking I fixed the issue with the following couple of lines of code:
// Create a new scene
let scene = SCNScene(named: "art.scnassets/chair.scn")!
let node = scene.rootNode.childNode(withName: "chair", recursively: true)!
node.position = SCNVector3(x,y,z)
scene.rootNode.addChildNode(node)
//shipNode.position = SCNVector3(x,y,z)
sceneView.scene = scene
instead of:
guard let scene = SCNScene(named: "art.scnassets/chair.scn"),
let shipNode = scene.rootNode.childNode(withName: "chair_DIFFUSE", recursively: false)
else {
print("Failed to render")
return
}
shipNode.position = SCNVector3(x,y,z)
sceneView.scene.rootNode.addChildNode(shipNode)
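A variant that keeps the existing AR scene instead of replacing it should also work; this is a sketch based on the node name that worked above, where searching recursively is what makes the difference:

// Sketch: add the chair to the running scene rather than swapping sceneView.scene.
guard let chairScene = SCNScene(named: "art.scnassets/chair.scn"),
      let chairNode = chairScene.rootNode.childNode(withName: "chair", recursively: true) else {
    print("Failed to render")
    return
}
chairNode.position = SCNVector3(x, y, z)
sceneView.scene.rootNode.addChildNode(chairNode)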
I have a node object in a 3D view and I need to drag that object.
So far I have tried the approach from here: Placing, Dragging and Removing SCNNodes in ARKit,
and converted it to Swift:
@objc func handleDragGesture(_ gestureRecognizer: UIGestureRecognizer) {
let tapPoint = gestureRecognizer.location(in: self.sceneView)
switch gestureRecognizer.state {
case .began:
print("Object began to move")
let hitResults = self.sceneView.hitTest(tapPoint, options: nil)
if hitResults.isEmpty { return }
let hitResult = hitResults.first
if let node = hitResult?.node.parent?.parent?.parent {
self.photoNode = node
}
case .changed:
print("Moving object position changed")
if let _ = self.photoNode {
let hitResults = self.sceneView.hitTest(tapPoint, types: .featurePoint)
let hitResult = hitResults.last
if let transform = hitResult?.worldTransform {
let matrix = SCNMatrix4FromMat4(transform)
let vector = SCNVector3Make(matrix.m41, matrix.m42, matrix.m43)
self.photoNode?.position = vector
}
}
case .ended:
print("Done moving object")
default:
break
}
}
However, it is not working properly. What is the correct way to do this?
You can do this using a UIPanGestureRecognizer; see the basic Swift Playground code below for handling an SCNNode.
import UIKit
import ARKit
import SceneKit
import PlaygroundSupport
public var textNode : SCNNode?
// Main ARKIT ViewController
class ViewController : UIViewController, ARSCNViewDelegate, ARSessionDelegate {
var textNode: SCNNode!
var counter = 0
@IBOutlet var sceneView: ARSCNView!
override func viewDidLoad() {
super.viewDidLoad()
// set the view's delegate
sceneView.delegate = self
// show statistics such as fps and timing information
sceneView.showsStatistics = true
// The view already has a scene; we will add the text node to its root node below
// Add ligthing
sceneView.autoenablesDefaultLighting = true
let text = SCNText(string: "Drag Me with Pan Gesture!", extrusionDepth: 1)
// create material
let material = SCNMaterial()
material.diffuse.contents = UIColor.green
text.materials = [material]
//Create Node object
textNode = SCNNode()
textNode.name = "textNode"
textNode.scale = SCNVector3(x:0.004,y:0.004,z:0.004)
textNode.geometry = text
textNode.position = SCNVector3(x: 0, y:0.02, z: -1)
// add new node to root node
self.sceneView.scene.rootNode.addChildNode(textNode)
// Add pan gesture for dragging the textNode about
sceneView.addGestureRecognizer(UIPanGestureRecognizer(target: self, action: #selector(panGesture(_:))))
}
override func loadView() {
sceneView = ARSCNView(frame:CGRect(x: 0.0, y: 0.0, width: 500.0, height: 600.0))
// Set the view's delegate
sceneView.delegate = self
let config = ARWorldTrackingConfiguration()
config.planeDetection = .horizontal
// Now we'll get messages when planes were detected...
sceneView.session.delegate = self
self.view = sceneView
sceneView.session.run(config)
}
@objc func panGesture(_ gesture: UIPanGestureRecognizer) {
gesture.minimumNumberOfTouches = 1
let results = self.sceneView.hitTest(gesture.location(in: gesture.view), types: ARHitTestResult.ResultType.featurePoint)
guard let result: ARHitTestResult = results.first else {
return
}
let position = SCNVector3Make(result.worldTransform.columns.3.x, result.worldTransform.columns.3.y, result.worldTransform.columns.3.z)
textNode.position = position
}
}
PlaygroundPage.current.liveView = ViewController()
PlaygroundPage.current.needsIndefiniteExecution = true
EDIT:
The above drag function only works if you have a single object in the view, so it is not really necessary to hit the node to start dragging; it will just drag from wherever you tap on the screen. If you have multiple objects in the view and you want to drag nodes independently, you can change the panGesture function to the following, detecting the tapped node first:
// drags nodes independently
@objc func panGesture(_ gesture: UIPanGestureRecognizer) {
gesture.minimumNumberOfTouches = 1
let results = self.sceneView.hitTest(gesture.location(in: gesture.view), types: ARHitTestResult.ResultType.featurePoint)
guard let result: ARHitTestResult = results.first else {
return
}
let hits = self.sceneView.hitTest(gesture.location(in: gesture.view), options: nil)
if let tappedNode = hits.first?.node {
let position = SCNVector3Make(result.worldTransform.columns.3.x, result.worldTransform.columns.3.y, result.worldTransform.columns.3.z)
tappedNode.position = position
}
}
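On newer ARKit versions the ARHitTestResult API used above is deprecated; the same lookup can be done with a raycast query, similar to the raycast-based pan handler earlier on this page. This is only a sketch, assuming iOS 13+:

// Sketch: raycast-based replacement for the deprecated hitTest(_:types:) call.
@objc func panGesture(_ gesture: UIPanGestureRecognizer) {
    guard let query = sceneView.raycastQuery(from: gesture.location(in: gesture.view),
                                             allowing: .estimatedPlane,
                                             alignment: .any),
          let result = sceneView.session.raycast(query).first else { return }
    let hits = sceneView.hitTest(gesture.location(in: gesture.view), options: nil)
    if let tappedNode = hits.first?.node {
        let t = result.worldTransform.columns.3
        tappedNode.position = SCNVector3(t.x, t.y, t.z)
    }
}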
REF: https://stackoverflow.com/a/48220751/5589073
This code works for me:
private func drag(sender: UIPanGestureRecognizer) {
switch sender.state {
case .began:
let location = sender.location(in: self.sceneView)
guard let hitNodeResult = self.sceneView.hitTest(location,
options: nil).first else { return }
self.PCoordx = hitNodeResult.worldCoordinates.x
self.PCoordy = hitNodeResult.worldCoordinates.y
self.PCoordz = hitNodeResult.worldCoordinates.z
case .changed:
// when you start to pan in screen with your finger
// hittest gives new coordinates of touched location in sceneView
// coord-pcoord gives distance to move or distance paned in sceneview
let hitNode = sceneView.hitTest(sender.location(in: sceneView), options: nil)
if let coordx = hitNode.first?.worldCoordinates.x,
let coordy = hitNode.first?.worldCoordinates.y,
let coordz = hitNode.first?.worldCoordinates.z {
let action = SCNAction.moveBy(x: CGFloat(coordx - self.PCoordx),
y: CGFloat(coordy - self.PCoordy),
z: CGFloat(coordz - self.PCoordz),
duration: 0.0)
self.photoNode.runAction(action)
self.PCoordx = coordx
self.PCoordy = coordy
self.PCoordz = coordz
}
sender.setTranslation(CGPoint.zero, in: self.sceneView)
case .ended:
self.PCoordx = 0.0
self.PCoordy = 0.0
self.PCoordz = 0.0
default:
break
}
}
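For completeness, a sketch of how this handler might be wired up. The sceneView outlet comes from the question; the wrapper selector name is an assumption, needed because drag(sender:) is private and not exposed to Objective-C:

// Sketch: registering the pan recognizer that drives drag(sender:).
override func viewDidLoad() {
    super.viewDidLoad()
    let pan = UIPanGestureRecognizer(target: self, action: #selector(handlePan(_:)))
    sceneView.addGestureRecognizer(pan)
}

@objc private func handlePan(_ sender: UIPanGestureRecognizer) {
    drag(sender: sender)
}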