giln / ARSCNViewDelegate.swift
Created March 4, 2019 14:44
// MARK: - ARSCNViewDelegate
func renderer(_: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
    guard anchor is ARPlaneAnchor else { return nil }
    // We return a special type of SCNNode for ARPlaneAnchors
    return PlaneNode()
}

func renderer(_: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
    guard let planeAnchor = anchor as? ARPlaneAnchor,
        let planeNode = node as? PlaneNode else { return }
    // Fit the new node's geometry to the detected plane
    planeNode.update(from: planeAnchor)
}
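ARKit keeps refining plane anchors after they are added, so the same forwarding is typically needed in the didUpdate callback as well. A minimal sketch consistent with the PlaneNode API below (this callback is cut off from the preview above):

func renderer(_: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
    guard let planeAnchor = anchor as? ARPlaneAnchor,
        let planeNode = node as? PlaneNode else { return }
    // Re-fit the geometry as ARKit refines its plane estimate
    planeNode.update(from: planeAnchor)
}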

giln / PlaneNode.swift
Created March 4, 2019 14:44
import ARKit
import SceneKit

public class PlaneNode: SCNNode {
    // MARK: - Public functions
    public func update(from planeAnchor: ARPlaneAnchor) {
        // We need to create a new geometry each time because it does not seem to update correctly for physics
        guard let device = MTLCreateSystemDefaultDevice(),
            let geometry = ARSCNPlaneGeometry(device: device) else { return }
        geometry.update(from: planeAnchor.geometry)
        geometry.firstMaterial?.transparency = 0.0
        self.geometry = geometry
        // Static physics body so virtual objects can interact with the plane
        physicsBody = SCNPhysicsBody(type: .static, shape: SCNPhysicsShape(geometry: geometry, options: nil))
    }
}

giln / SessionConfiguration.swift
Created March 4, 2019 14:43
// Create a session configuration
let configuration = ARWorldTrackingConfiguration()
// Enable horizontal plane detection
configuration.planeDetection = .horizontal
// We want to receive the frames from the video
sceneView.session.delegate = self
// Run the session with the configuration
sceneView.session.run(configuration)
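This configuration code typically lives in the view controller's lifecycle. A sketch of a common placement, assuming the ARViewController shown at the end of this page (pairing run with pause is the standard ARKit pattern; the pause half is not part of this preview):

override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    let configuration = ARWorldTrackingConfiguration()
    configuration.planeDetection = .horizontal
    sceneView.session.delegate = self
    sceneView.session.run(configuration)
}

override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    // Pause the session when the view is off screen to save power
    sceneView.session.pause()
}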

giln / Output.swift
Created March 4, 2019 14:42
// MARK: - Private functions
let handDetector = HandDetector()

private func startDetection() {
    // To avoid force unwrapping in VNImageRequestHandler
    guard let buffer = currentBuffer else { return }
    handDetector.performDetection(inputBuffer: buffer) { outputBuffer, _ in
        // Here we are on a background thread
        DispatchQueue.main.async {
            if let outputBuffer = outputBuffer {
                self.previewView.image = UIImage(ciImage: CIImage(cvPixelBuffer: outputBuffer))
            }
            self.currentBuffer = nil // release so the session delegate accepts the next frame
        }
    }
}

giln / Preview.swift
Created March 4, 2019 14:41
var previewView = UIImageView()

// MARK: - Lifecycle
override func loadView() {
    super.loadView()
    view = sceneView
    // Create a session configuration
    let configuration = ARWorldTrackingConfiguration()
    sceneView.session.run(configuration)
    // Overlay the preview on top of the AR scene
    view.addSubview(previewView)
}

giln / HandDetector.swift
Created March 4, 2019 14:40
import CoreML
import Vision

public class HandDetector {
    // MARK: - Variables
    private let visionQueue = DispatchQueue(label: "com.viseo.ARML.visionqueue")

    private lazy var predictionRequest: VNCoreMLRequest = {
        // Load the ML model through its generated class and create a Vision request for it.
        do {
            let model = try VNCoreMLModel(for: HandModel().model)
            return VNCoreMLRequest(model: model)
        } catch {
            fatalError("Can't load Vision ML model: \(error)")
        }
    }()
}
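Output.swift above calls handDetector.performDetection(inputBuffer:completion:), which this truncated preview never shows. A minimal sketch of such a method, assuming the model returns a hand-segmentation mask that Vision surfaces as a VNPixelBufferObservation (an approximation, not the verbatim gist):

public func performDetection(inputBuffer: CVPixelBuffer,
                             completion: @escaping (CVPixelBuffer?, Error?) -> Void) {
    // .right orientation: see the VNImageRequestHandler snippet below
    let requestHandler = VNImageRequestHandler(cvPixelBuffer: inputBuffer, orientation: .right)
    visionQueue.async {
        do {
            try requestHandler.perform([self.predictionRequest])
            // An image-to-image model yields its output as a VNPixelBufferObservation
            let observation = self.predictionRequest.results?.first as? VNPixelBufferObservation
            completion(observation?.pixelBuffer, nil)
        } catch {
            completion(nil, error)
        }
    }
}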

giln / VNImageRequestHandler.swift
Created March 4, 2019 14:39
let visionQueue = DispatchQueue(label: "com.viseo.ARML.visionqueue")

private func startDetection() {
    // To avoid force unwrapping in VNImageRequestHandler
    guard let buffer = currentBuffer else { return }
    // .right because the pixel data captured by an iOS device is encoded in the camera sensor's native landscape orientation
    let requestHandler = VNImageRequestHandler(cvPixelBuffer: buffer, orientation: .right)
    // We perform our CoreML requests asynchronously.
    visionQueue.async {
        // Run the Vision request; on failure we simply skip this frame
        try? requestHandler.perform([self.predictionRequest])
    }
}

giln / PredictionRequest.swift
Created March 4, 2019 14:38
private lazy var predictionRequest: VNCoreMLRequest = {
    // Load the ML model through its generated class and create a Vision request for it.
    do {
        let model = try VNCoreMLModel(for: HandModel().model)
        let request = VNCoreMLRequest(model: model)
        // This setting determines whether images are scaled or cropped to fit the 224x224 input size. scaleFill avoids cutting off part of the image.
        request.imageCropAndScaleOption = VNImageCropAndScaleOption.scaleFill
        return request
    } catch {
        // A missing model is a programmer error, so crash early
        fatalError("Can't load Vision ML model: \(error)")
    }
}()

giln / ARSessionDelegate.swift
Created March 4, 2019 14:36
// MARK: - ARSessionDelegate
var currentBuffer: CVPixelBuffer?

func session(_: ARSession, didUpdate frame: ARFrame) {
    // Return early if currentBuffer is still being processed or the camera's tracking state is not normal
    guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
        return
    }
    // Retain the image buffer for Vision processing
    currentBuffer = frame.capturedImage
    startDetection()
}
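Resetting currentBuffer to nil only after detection finishes (see Output.swift above) acts as simple back-pressure: frames that arrive while Vision is still busy are dropped rather than queued behind a slow model.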

giln / ARViewController.swift
Created March 4, 2019 14:34
ARViewController #2
import ARKit

class ARViewController: UIViewController, ARSessionDelegate {
    // MARK: - Variables
    let sceneView = ARSCNView()

    // MARK: - Lifecycle
    override func loadView() {
        super.loadView()
        view = sceneView
    }
}