Created
September 22, 2017 14:09
-
-
Save RNHTTR/2bc350983a602bab46d5490e7be67ebc to your computer and use it in GitHub Desktop.
Interacting with Augmented Reality objects is not as hard as you might think
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Using touch gestures to interact with your Augmented Reality objects actually doesn't involve more ARKit code! Rather,
// you'll have to implement some new SceneKit code and play with a UITapGestureRecognizer from good ol' UIKit. Here we will
// create a Super Mario box, and it will pop up as if you were Mario jumping underneath it!
// If you're jumping straight into this view controller, you might need to update your Info.plist (Information Property
// List) file first. Add the key "Privacy - Camera Usage Description" with the corresponding value "This application will
// use your camera for Augmented Reality", or some similarly descriptive note.
// Let's get started!
// You'll need to import SceneKit to create and edit your objects and ARKit to place them in your augmented reality space.
import UIKit | |
import SceneKit | |
import ARKit | |
// Make sure you conform to the ARSCNViewDelegate protocol.
class ARTouchGestureViewController: UIViewController, ARSCNViewDelegate {

    // The AR view that renders the camera feed plus our 3D content.
    // Implicitly unwrapped because it is created unconditionally in viewDidLoad,
    // before anything else can touch it.
    var sceneView: ARSCNView!

    override func viewDidLoad() {
        super.viewDidLoad()

        // Size the AR view to match the view controller's view and add it to the
        // hierarchy.
        sceneView = ARSCNView(frame: view.frame)
        view.addSubview(sceneView)

        // Receive ARSCNViewDelegate callbacks here, and optionally display render
        // statistics at the bottom of the view.
        sceneView.delegate = self
        sceneView.showsStatistics = true

        // Create the scene and the 3D box that will be displayed. SceneKit
        // dimensions are in meters, so choose values appropriate to your goals.
        // The chamferRadius rounds the box's right-angled edges, much like a
        // corner radius on a two-dimensional view.
        let scene = SCNScene()
        let box = SCNBox(width: 0.2, height: 0.2, length: 0.2, chamferRadius: 0.01)

        // Texture all six faces of the box with the "marioSide" image. Download an
        // image (or create your own), add it to Assets.xcassets, and use its asset
        // name here. Note: UIImage(named:) returns nil if the asset is missing,
        // which leaves the box untextured rather than crashing.
        let material = SCNMaterial()
        material.diffuse.contents = UIImage(named: "marioSide")

        // Wrap the geometry in a node and place it 0.7 m in front of the camera.
        // SCNVector3 components are in meters; a positive z-coordinate would put
        // the object behind you.
        let node = SCNNode()
        node.geometry = box
        node.geometry?.materials = [material]
        node.position = SCNVector3(x: 0, y: 0.1, z: -0.7)

        // Attach the node to the scene graph so it gets rendered.
        scene.rootNode.addChildNode(node)

        // Recognize taps on the AR view so we can react when the box is touched.
        // The selector triggers tap(recognizer:) below.
        // See https://developer.apple.com/documentation/uikit/uitapgesturerecognizer
        let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(tap))
        sceneView.addGestureRecognizer(tapGestureRecognizer)

        // Assign the scene to the scene property of the sceneView. Say that five times fast!
        sceneView.scene = scene
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Track the device's position and orientation with the rear camera and
        // detect flat surfaces in the real world.
        // See https://developer.apple.com/documentation/arkit/arworldtrackingconfiguration
        let configuration = ARWorldTrackingConfiguration()
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Pause the session while the AR view is off screen to avoid the cost of
        // camera capture and world tracking.
        sceneView.session.pause()
    }

    /// Handles a tap on the AR view. If the tap hit a node, bounce it up 0.4 m
    /// and back down — as if Mario had jumped underneath it.
    ///
    /// Note the @objc mark: the method must be exposed to Objective-C for the
    /// gesture recognizer's selector. See
    /// https://developer.apple.com/library/content/documentation/Swift/Conceptual/BuildingCocoaApps/MixandMatch.html
    @objc func tap(recognizer: UIGestureRecognizer) {
        // FIX: the original force-cast (`as! SCNView`) would crash if the
        // recognizer were ever attached to a different view. Bail out instead.
        guard let sceneView = recognizer.view as? SCNView else { return }

        // Hit-test the tap location against the scene's geometry.
        let touchLocation = recognizer.location(in: sceneView)
        let hitResults = sceneView.hitTest(touchLocation, options: [:])

        // FIX: use `first` rather than `[0]` guarded by an isEmpty check.
        guard let node = hitResults.first?.node else { return }

        // FIX: the original set SCNTransaction.animationDuration to 0.5 s but
        // scheduled the return move after only 0.1 s, so the down animation
        // started while the up animation was still running and the two fought
        // each other. It also hard-coded absolute coordinates that silently
        // duplicated viewDidLoad's values. Instead, run a keyed action sequence
        // of *relative* moves (up 0.4 m, then back down), and ignore taps while
        // a bounce is already in flight so repeated taps don't stack offsets.
        guard node.action(forKey: "bounce") == nil else { return }
        let bounce = SCNAction.sequence([
            SCNAction.moveBy(x: 0, y: 0.4, z: 0, duration: 0.5),
            SCNAction.moveBy(x: 0, y: -0.4, z: 0, duration: 0.5),
        ])
        node.runAction(bounce, forKey: "bounce")
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment