
Ethan Saadia (eospi)

import MLCompute

/// Dimensions and input tensors for an MLCompute training model.
struct Model {
    let batchSize: Int
    let imageSize: Int
    let outSize: Int
    let numberOfClasses: Int
    let imagesTensor: MLCTensor
    let labelsTensor: MLCTensor
}
import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSCNViewDelegate {
    @IBOutlet var sceneView: ARSCNView!
    // Tracks how many ARAnchors ARKit has added to the session.
    var anchorCount = 0
}
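The preview cuts off after the anchorCount property. Given the ARSCNViewDelegate conformance, the gist presumably updates that count from a delegate callback; here is a minimal sketch of that idea (the callback body is an assumption, not the original gist code):

import ARKit
import SceneKit

extension ViewController {
    // Assumption: increment the counter each time ARKit adds an anchor to the scene.
    func renderer(_ renderer: SCNSceneRenderer, didAdd node: SCNNode, for anchor: ARAnchor) {
        anchorCount += 1
        print("Anchor count: \(anchorCount)")
    }
}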
import UIKit
import RealityKit
import AVFoundation

class ViewController: UIViewController {
    @IBOutlet var arView: ARView!
    var videoPlayer: AVPlayer!

    override func viewDidLoad() {
        super.viewDidLoad()
        guard let path = Bundle.main.path(forResource: "myFileName", ofType: "mp4") else { return }
        let videoURL = URL(fileURLWithPath: path)
    }
}
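The preview ends once the video URL is built. One plausible way to continue (an assumption; the rest of the gist isn't shown) is to wrap an AVPlayer in a RealityKit VideoMaterial and put it on an anchored plane:

import AVFoundation
import RealityKit

// Sketch (assumption): show a video on a 16:9 plane anchored to the first
// horizontal plane ARKit finds. VideoMaterial requires iOS 14 or later.
func showVideo(from videoURL: URL, in arView: ARView) {
    let player = AVPlayer(url: videoURL)
    let material = VideoMaterial(avPlayer: player)
    let screen = ModelEntity(mesh: .generatePlane(width: 1.6, depth: 0.9), materials: [material])
    let anchor = AnchorEntity(plane: .horizontal)
    anchor.addChild(screen)
    arView.scene.addAnchor(anchor)
    player.play()
}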
import UIKit
import RealityKit
import ARKit
import Combine

class ViewController: UIViewController {
    @IBOutlet var arView: ARView!

    override func viewDidLoad() {
        super.viewDidLoad()
        let audioFilePath = "path/to/audio/file"
        let entity = Entity()

        // Load a spatial audio resource and attach it to the entity.
        do {
            let resource = try AudioFileResource.load(named: audioFilePath, in: nil, inputMode: .spatial, loadingStrategy: .preload, shouldLoop: true)
            let audioController = entity.prepareAudio(resource)
            audioController.play()
            // To start playing right away, replace the prepareAudio/play calls above with:
            // entity.playAudio(resource)
        } catch {
            print("Error loading audio file: \(error)")
        }

        // Option 1: a tracked raycast from the screen center that keeps updating
        // the anchor as ARKit refines its understanding of the plane.
        let _ = arView.trackedRaycast(from: arView.center, allowing: .existingPlaneInfinite, alignment: .horizontal, updateHandler: { results in
            guard let result = results.first else { return }
            let anchor = AnchorEntity(raycastResult: result)
            anchor.addChild(entity)
            self.arView.scene.addAnchor(anchor)
        })

        // Option 2: a one-shot raycast.
        guard let query = arView.makeRaycastQuery(from: arView.center, allowing: .existingPlaneInfinite, alignment: .horizontal) else { return }
        guard let raycastResult = arView.session.raycast(query).first else { return }

        // Set a transform on an existing entity...
        let transform = Transform(matrix: raycastResult.worldTransform)
        entity.transform = transform

        // ...or anchor the entity to the ARRaycastResult.
        let anchor = AnchorEntity(raycastResult: raycastResult)
        anchor.addChild(entity)
        arView.scene.addAnchor(anchor)
    }
}
eospi / RealityKitGestures.swift
Created August 13, 2019 16:08
Gestures in RealityKit
import RealityKit
import UIKit

class ViewController: UIViewController {
    var arView: ARView!

    func addGestures() {
        // Use any entity. Here we're using a cube with size 0.5m.
        let entity = ModelEntity(mesh: MeshResource.generateBox(size: 0.5))
        // RealityKit's built-in gestures require collision shapes on the entity.
        entity.generateCollisionShapes(recursive: true)
    }
}
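The preview stops after the collision shapes are generated. The step the gist is presumably building toward is ARView's installGestures(_:for:), roughly like this sketch (the helper function is illustrative, not the original gist code):

import RealityKit

// Sketch: install RealityKit's built-in translate/rotate/scale gestures on an
// entity that has collision shapes. Pass a subset such as [.translation] to
// restrict which gestures are enabled.
func installAllGestures(on entity: ModelEntity, in arView: ARView) {
    entity.generateCollisionShapes(recursive: true)
    arView.installGestures(.all, for: entity)
}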
eospi / loadEntityAsync.swift
Last active August 2, 2024 19:27
RealityKit Asynchronous Loading
import RealityKit
import Combine

func loadEntityAsync() {
    // Create a world anchor at the origin and add it to the scene.
    let anchor = AnchorEntity(world: [0, 0, 0])
    arView.scene.addAnchor(anchor)

    let usdzPath = "path/to/usdz/asset"
    // Load the asset asynchronously (one way to finish this is sketched below).
}
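The preview ends right at the load step. With RealityKit's Combine-based loading API, it would go roughly like this (the cancellable property and the error handling are assumptions, not the original gist body):

import RealityKit
import Combine

// Keep the subscription alive until loading finishes; in a real app this would
// be a property on the view controller.
var loadCancellable: AnyCancellable?

// Sketch (assumption): load a USDZ model asynchronously and attach it to an
// existing anchor once it is ready.
func loadModelAsync(named usdzName: String, into anchor: AnchorEntity) {
    loadCancellable = ModelEntity.loadModelAsync(named: usdzName)
        .sink(receiveCompletion: { completion in
            if case .failure(let error) = completion {
                print("Unable to load model: \(error)")
            }
        }, receiveValue: { modelEntity in
            anchor.addChild(modelEntity)
        })
}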
eospi / loadEntitySync.swift
Last active August 12, 2019 20:16
RealityKit Synchronous Loading
import RealityKit

func loadEntitySync() {
    // Create a world anchor at the origin and add it to the scene.
    let anchor = AnchorEntity(world: [0, 0, 0])
    arView.scene.addAnchor(anchor)

    // Handle errors in case the file doesn't exist.
    do {
        let usdzPath = "path/to/usdz/asset"
        let modelEntity = try ModelEntity.loadModel(named: usdzPath)
        anchor.addChild(modelEntity)
    } catch {
        print("Unable to load model: \(error)")
    }
}
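Note that ModelEntity.loadModel(named:) blocks the calling thread until the USDZ file is fully loaded, so the asynchronous variant above is generally the better choice for large assets.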