Skip to content

Instantly share code, notes, and snippets.

@ynagatomo
Forked from arthurschiller/RealityRendererTest.swift
Last active October 12, 2024 20:57
Show Gist options
  • Save ynagatomo/e1b4e9f1e52cd561c7f94483aa708417 to your computer and use it in GitHub Desktop.
RealityRenderer Test (visionOS)
//
// RealityRendererView.swift
// RealityRendererTest
//
// Created by Arthur Schiller on 11.01.24.
//
// Change Log: by Yasuhito Nagatomo
// - Added ImageBasedLighting, Mar 2, 2024
// - Added a camera rotation animation, Mar 2, 2024
import SwiftUI
import Metal
import MetalKit
import RealityKit
/// Hosts the Metal-backed renderer view and asynchronously loads the
/// image-based-lighting (IBL) environment resource named "Sunlight",
/// handing it to the representable once available.
struct RealityRendererView: View {
    // Loaded asynchronously in .task; nil until the load finishes.
    @State private var iblResource: EnvironmentResource?

    var body: some View {
        MetalViewRepresentable(iblRes: iblResource)
            .background(Color.red)
            .task {
                // FIX: the original used `try? await`, which silently
                // discarded the load error; log failures explicitly.
                do {
                    iblResource = try await EnvironmentResource(named: "Sunlight")
                    print("* Loaded IBL")
                } catch {
                    print("* Failed to load IBL: \(error)")
                }
            }
    }
}
// Xcode canvas preview: shows the renderer test view in an automatic
// visionOS window scene.
#Preview(windowStyle: .automatic) {
    RealityRendererView()
}
/// Bridges the UIKit-based `MetalView` into SwiftUI and forwards the
/// IBL environment resource to it whenever SwiftUI re-runs the update
/// pass with a non-nil resource.
struct MetalViewRepresentable: UIViewRepresentable {
    /// The IBL resource to install; nil while it is still loading.
    let iblRes: EnvironmentResource?

    func makeUIView(context: Context) -> MetalView {
        // The resource is applied in updateUIView, which SwiftUI calls
        // immediately after creation and on every state change.
        MetalView()
    }

    func updateUIView(_ uiView: MetalView, context: Context) {
        guard let resource = iblRes else { return }
        uiView.updateIBLResource(resource)
    }
}
/// A `UIView` backed by a `CAMetalLayer` that renders a small RealityKit
/// scene (a green box orbited by a camera) offscreen through
/// `RealityKit.RealityRenderer`, then blits the result into the layer's
/// drawable on every display-link tick.
class MetalView: UIView {
    /// Installs an image-based-lighting resource on the renderer.
    /// Called from `MetalViewRepresentable.updateUIView` once the
    /// `EnvironmentResource` has finished loading.
    func updateIBLResource(_ iblRes: EnvironmentResource) {
        print("* MetalView.update IBL")
        realityRenderer?.lighting.resource = iblRes
        realityRenderer?.lighting.intensityExponent = 13.0
    }

    // Back this view with a CAMetalLayer so we can vend drawables directly.
    override class var layerClass: AnyClass {
        return CAMetalLayer.self
    }

    var metalLayer: CAMetalLayer {
        return layer as! CAMetalLayer
    }

    var device: MTLDevice!
    var displayLink: CADisplayLink!
    var commandQueue: MTLCommandQueue!
    var offscreenTexture: MTLTexture!
    var imageTexture: MTLTexture!
    // Debug aid: when true, a test image from the asset catalog is blitted
    // over the offscreen texture before presenting.
    var blitTestImage: Bool = false

    private var realityRenderer: RealityKit.RealityRenderer?
    // Re-entrancy guard: drawFrame is skipped while a frame is in flight.
    private var lockFlag = false
    private var previousTimeStamp: TimeInterval?
    private let camera = PerspectiveCamera()
    private let modelEntity = {
        var material = PhysicallyBasedMaterial()
        material.baseColor = PhysicallyBasedMaterial.BaseColor(tint: .green)
        let model = ModelEntity(
            mesh: .generateBox(size: 0.3, cornerRadius: 0.1),
            materials: [
                material // UnlitMaterial(color: .blue)
            ]
        )
        return model
    }()

    required init?(coder aDecoder: NSCoder) {
        super.init(coder: aDecoder)
        commonInit()
    }

    override init(frame: CGRect) {
        super.init(frame: frame)
        commonInit()
    }

    // FIX: CADisplayLink strongly retains its target, so an active link kept
    // this view (and its GPU resources) alive forever and deinit never ran.
    // Break the cycle by invalidating the link when the view leaves the
    // window, and restart it when the view is re-attached.
    override func didMoveToWindow() {
        super.didMoveToWindow()
        if window == nil {
            displayLink?.invalidate()
            displayLink = nil
        } else if displayLink == nil {
            startDisplayLink()
        }
    }

    private func commonInit() {
        device = MTLCreateSystemDefaultDevice()
        metalLayer.device = device
        metalLayer.pixelFormat = .bgra8Unorm
        // framebufferOnly must be false so the drawable can be a blit target.
        metalLayer.framebufferOnly = false
        // FIX: the queue was previously created twice in a row
        // (device.makeCommandQueue() immediately followed by
        // createCommandQueue(device:)); create it exactly once.
        createCommandQueue(device: device)
        startDisplayLink()
        do {
            let realityRenderer = try RealityKit.RealityRenderer()
            realityRenderer.cameraSettings.colorBackground = .color(
                .init(srgbRed: 0.5, green: 0.2, blue: 0.3, alpha: 1)
            )
            // realityRenderer.cameraSettings.colorBackground = .outputTexture()
            modelEntity.orientation = simd_quatf(angle: Float.pi / 4,
                                                 axis: SIMD3<Float>(0, 1, 0))
            realityRenderer.entities.append(modelEntity)
            camera.orientation = simd_quatf(angle: 0, // Float.pi / 4,
                                            axis: SIMD3<Float>(0, 1, 0))
            camera.position = SIMD3<Float>(0, 0, 0)
            realityRenderer.entities.append(camera)
            realityRenderer.activeCamera = camera
            self.realityRenderer = realityRenderer
        } catch {
            print(error)
        }
    }

    /// (Re)creates the offscreen render target matching the drawable's size.
    private func setupOffscreenTexture(withDrawable drawable: CAMetalDrawable) {
        let texture = drawable.texture
        let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
            pixelFormat: .bgra8Unorm,
            width: texture.width,
            height: texture.height,
            mipmapped: false
        )
        // renderTarget: RealityRenderer draws into it; shaderRead/Write for
        // the subsequent blit.
        textureDescriptor.usage = [.shaderRead, .shaderWrite, .renderTarget]
        offscreenTexture = device.makeTexture(descriptor: textureDescriptor)
    }

    /// Loads a texture from the asset catalog, returning nil on failure.
    private func loadTextureFromAssetCatalog(named imageName: String, device: MTLDevice) -> MTLTexture? {
        let textureLoader = MTKTextureLoader(device: device)
        do {
            let texture = try textureLoader.newTexture(name: imageName, scaleFactor: 1.0, bundle: nil, options: nil)
            return texture
        } catch {
            print("Error loading texture from asset catalog: \(error)")
            return nil
        }
    }

    //MARK: Builders
    private func createCommandQueue(device: MTLDevice) {
        commandQueue = device.makeCommandQueue()
    }

    private func startDisplayLink() {
        displayLink = CADisplayLink(target: self, selector: #selector(drawFrame))
        displayLink.add(to: .current, forMode: .default)
    }

    /// Renders one frame: advances the scene, renders it offscreen via
    /// RealityRenderer, then blits the result into the layer's drawable.
    @objc private func drawFrame() {
        guard
            !lockFlag,
            let drawable = metalLayer.nextDrawable()
        else {
            return
        }
        print("")
        lockFlag = true
        modelEntity.position.z = -0.5
        // Spin the camera a little each frame around the Y axis.
        camera.orientation *= simd_quatf(angle: 0.01,
                                         axis: SIMD3<Float>(0, 1, 0))
        // camera.look(at: modelEntity.position, from: .zero, relativeTo: nil)
        // FIX: load the test texture only when it is actually needed;
        // previously a missing "testTexture" asset hit fatalError() even
        // with blitTestImage == false.
        if blitTestImage && imageTexture == nil {
            guard let texture = loadTextureFromAssetCatalog(named: "testTexture", device: device) else {
                fatalError()
            }
            imageTexture = texture
        }
        // Recreate the offscreen target if the drawable size changed.
        if offscreenTexture == nil || offscreenTexture.width != drawable.texture.width || offscreenTexture.height != drawable.texture.height {
            setupOffscreenTexture(withDrawable: drawable)
        }
        let commandBuffer = commandQueue.makeCommandBuffer()!
        let timestamp = displayLink.timestamp
        let deltaTime: TimeInterval = {
            guard let previousTimeStamp else {
                return 0
            }
            return timestamp - previousTimeStamp
        }()
        let event = device.makeEvent()!
        print("Delta time: \(deltaTime)")
        // Tracks whether updateAndRender was actually enqueued, so we never
        // wait on an event that will not be signaled.
        var renderScheduled = false
        do {
            if let realityRenderer {
                let descriptor = RealityKit.RealityRenderer.CameraOutput.Descriptor.singleProjection(
                    colorTexture: offscreenTexture // can I render this directly into the drawable?
                )
                try realityRenderer.updateAndRender(
                    deltaTime: deltaTime,
                    cameraOutput: .init(descriptor),
                    whenScheduled: { _ in
                        print("Render scheduled for timestamp: \(timestamp)")
                    },
                    onComplete: { renderer in
                        print("Render complete for timestamp: \(timestamp)")
                    },
                    actionsBeforeRender: [
                    ],
                    actionsAfterRender: [
                        // Signals the event once RealityKit's GPU work is done.
                        .signal(event, value: 1)
                    ]
                )
                renderScheduled = true
            }
        } catch {
            print("Error: \(error)")
        }
        // FIX: the wait was previously encoded unconditionally, so a nil
        // realityRenderer or a throwing updateAndRender left the command
        // buffer waiting on a signal that never arrives, and the
        // waitUntilCompleted() below hung the main thread forever. Only
        // wait when the matching signal was actually scheduled.
        if renderScheduled {
            commandBuffer.encodeWaitForEvent(event, value: 1)
        }
        let blitEncoder = commandBuffer.makeBlitCommandEncoder()!
        // Blit the texture to the offscreenTexture's texture -> just for testing, not important
        if blitTestImage {
            blitEncoder.copy(from: imageTexture, sourceSlice: 0, sourceLevel: 0, sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0), sourceSize: MTLSize(width: imageTexture.width, height: imageTexture.height, depth: 1), to: offscreenTexture, destinationSlice: 0, destinationLevel: 0, destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
        }
        // Blit the offscreenTexture to the drawable texture
        blitEncoder.copy(from: offscreenTexture, sourceSlice: 0, sourceLevel: 0, sourceOrigin: MTLOrigin(x: 0, y: 0, z: 0), sourceSize: MTLSize(width: offscreenTexture.width, height: offscreenTexture.height, depth: 1), to: drawable.texture, destinationSlice: 0, destinationLevel: 0, destinationOrigin: MTLOrigin(x: 0, y: 0, z: 0))
        blitEncoder.endEncoding()
        commandBuffer.present(drawable)
        commandBuffer.commit()
        // NOTE(review): blocking the main thread on GPU completion is costly;
        // kept to preserve the original frame pacing and lockFlag semantics.
        commandBuffer.waitUntilCompleted()
        print("Render Completed for timestamp: \(timestamp)")
        print("---- ✅ ----")
        previousTimeStamp = timestamp
        lockFlag = false
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment