//
// MyMetalWaterfall.swift
// version 0.1.105 (updated for Swift 5)
//
// Demonstrates using a MetalKit compute shader to render a live waterfall RGB bitmap
// into a UIView
//
// This is a single file iOS app
//
// It includes AppDelegate for a minimal demonstration app
// that can be built without a Storyboard
//
// More than half of this file is an audio recording class to provide test input
//
// Requires the Metal shader in Shader.metal (included in a comment block near the end of this file)
//
// Re-run after allowing microphone permission.
//
// Created by Ronald Nicholson ([email protected]) on 2/4/17.
// Copyright © 2017,2019 Ronald H Nicholson Jr. All rights reserved.
// http://www.nicholson.com/rhn/
// Distribution permission: BSD 2-clause
//
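//
// Note: iOS will not grant record permission without a microphone usage
// description in the app's Info.plist. The key name below is standard;
// the description string is just an example:
//
//     <key>NSMicrophoneUsageDescription</key>
//     <string>Microphone audio is displayed as a live spectrum waterfall.</string>
//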
import UIKit
import MetalKit
import Accelerate
import AVFoundation
import AudioUnit
var globalAudioRecorder : RecordAudio? = nil
var spectrumArray = [Float](repeating: 0, count: bitmapWidth) // latest FFT magnitudes, scaled 0...1
var enable = true // tap the waterfall view to pause/resume display updates
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate { // minimal app delegate
var window: UIWindow?
var myViewController: MyViewController?
func application(_ application: UIApplication, didFinishLaunchingWithOptions
launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
myViewController = MyViewController()
self.window = UIWindow(frame: UIScreen.main.bounds)
self.window?.rootViewController = myViewController!
self.window?.makeKeyAndVisible()
return true
}
func applicationWillResignActive(_ application: UIApplication) {
globalAudioRecorder?.stopRecording()
}
func applicationDidEnterBackground(_ application: UIApplication) { }
func applicationWillEnterForeground(_ application: UIApplication) { }
func applicationDidBecomeActive(_ application: UIApplication) {
globalAudioRecorder?.startRecording()
}
func applicationWillTerminate(_ application: UIApplication) { }
} // AppDelegate
let bitmapWidth = 256
let bitmapHeight = 256
class MyViewController: UIViewController {
var myRecorder : RecordAudio? = nil
var mySpectrumView : SpectrumView?
var myWaterfallView : WaterfallView?
var animationTimer : CADisplayLink!
override func viewDidLoad() {
super.viewDidLoad()
myRecorder = RecordAudio()
myRecorder!.startRecording()
globalAudioRecorder = myRecorder
self.view.backgroundColor = UIColor.lightGray
let r1 = CGRect(x: 64, y: 128, width: bitmapWidth, height: bitmapHeight)
myWaterfallView = WaterfallView()
myWaterfallView!.frame = r1
self.view.addSubview(myWaterfallView!)
let r2 = CGRect(x: 64, y: 64, width: bitmapWidth, height: 64)
mySpectrumView = SpectrumView()
mySpectrumView!.frame = r2
self.view.addSubview(mySpectrumView!)
animationTimer = CADisplayLink(target: self,
selector: #selector(self.updateViews) )
animationTimer.preferredFramesPerSecond = 120 // capped by the display's maximum refresh rate
animationTimer.add(to: RunLoop.current,
forMode: RunLoop.Mode.common )
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
if let r = myRecorder {
if r.isRecording == false {
r.startRecording()
}
}
}
@objc func updateViews() {
guard let recorder = myRecorder else { return }
if enable && recorder.isRecording {
makeSpectrumFromAudio(recorder)
mySpectrumView?.setNeedsDisplay()
myWaterfallView?.setNeedsDisplay()
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
} // UIViewController
// MARK: -- Spectrum --
class SpectrumView: UIView {
var slowSpectrumArray = [Float](repeating: 0.0, count: bitmapWidth)
override func draw(_ rect: CGRect) {
guard let context = UIGraphicsGetCurrentContext() else { return }
let r0 = self.bounds
// fill the background with white
context.setFillColor(red: 1.0, green: 1.0, blue: 1.0, alpha: 1.0)
context.fill(r0)
let n = bitmapWidth
let array = spectrumArray
let r : Float = 0.25
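// one-pole IIR (exponential) smoothing of the spectrum:
// y[n] = r*x[n] + (1-r)*y[n-1], so the displayed bars rise
// and fall gradually instead of flickering frame to frame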
for i in 0 ..< n {
slowSpectrumArray[i] = r * array[i] + (1.0 - r) * slowSpectrumArray[i]
}
context.setFillColor(red: 0.2, green: 0.2, blue: 1.0, alpha: 1.0);
let h0 = CGFloat(r0.size.height)
let dx = (r0.size.width) / CGFloat(n)
if array.count >= n {
for i in 0 ..< n {
let y = h0 * CGFloat(1.0 - slowSpectrumArray[i])
let x = r0.origin.x + CGFloat(i) * dx
let h = h0 - y
let w = dx
let r1 = CGRect(x: x, y: y, width: w, height: h)
context.fill(r1)
}
}
}
}
// MARK: -- WaterFall --
class WaterfallView: MTKView {
var frameCount = 0
private var queue: MTLCommandQueue!
private var cps: MTLComputePipelineState!
private var uniform_buffer: MTLBuffer!
required public init(coder: NSCoder) {
super.init(coder: coder)
initCommon()
}
override init(frame frameRect: CGRect, device: MTLDevice?) {
super.init(frame: frameRect, device: device)
initCommon()
}
private func initCommon() {
if device == nil {
device = MTLCreateSystemDefaultDevice()
}
framebufferOnly = false
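// framebufferOnly must be false so that a compute shader is allowed
// to write into the drawable's texture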
self.preferredFramesPerSecond = 60
createUniformBuffer()
registerComputeShader()
}
#if IOS_SIMULATOR // define this Active Compilation Condition to stub out Metal on pre-iOS-13 simulators, which lack Metal support
func computeShader() { }
func registerComputeShader() { }
func createUniformBuffer() { }
@objc func updateBitmap() { }
#else
//
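// Note: despite the name, this "uniform" buffer holds the waterfall
// bitmap itself: two stacked copies of a bitmapWidth x bitmapHeight
// RGBA-Float image (so any bitmapHeight-tall window can be read
// contiguously), plus one extra row whose first float records the
// most recently written row, used as the scroll offset.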
func createUniformBuffer() {
let bytesPerPixel = 4 * MemoryLayout<Float>.size // 4 Floats (RGBA) per pixel
let bytes = bitmapWidth * (1 + 2 * bitmapHeight) * bytesPerPixel
let options = MTLResourceOptions()
uniform_buffer = device!.makeBuffer(length: bytes, options: options)
let bufferPointer = uniform_buffer.contents()
bzero(bufferPointer, bytes) // start with an all-zero (black) bitmap
}
//
func registerComputeShader() {
queue = device!.makeCommandQueue()
let library = device!.makeDefaultLibrary()!
let kernel = library.makeFunction(name: "compute_func_1")!
do {
cps = try device!.makeComputePipelineState(function: kernel)
} catch {
print("makeComputePipelineState failed: \(error)")
}
}
func computeShader() {
if let drawable = currentDrawable {
let commandBuffer = queue.makeCommandBuffer()
let commandEncoder = commandBuffer?.makeComputeCommandEncoder()
commandEncoder?.setComputePipelineState(cps)
commandEncoder?.setTexture(drawable.texture, index: 0)
commandEncoder?.setBuffer(uniform_buffer, offset: 0, index: 1)
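// dispatch one GPU thread per pixel in 8x8 threadgroups; the integer
// division below assumes the drawable's width and height are multiples
// of 8, otherwise edge pixels would be left unrendered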
let threadGroupCount = MTLSizeMake(8, 8, 1)
let threadGroups = MTLSizeMake(drawable.texture.width / threadGroupCount.width,
drawable.texture.height / threadGroupCount.height, 1)
commandEncoder?.dispatchThreadgroups(threadGroups,
threadsPerThreadgroup: threadGroupCount)
commandEncoder?.endEncoding()
commandBuffer?.present(drawable)
commandBuffer?.commit()
}
}
@objc func updateBitmap() {
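// Overwrite one waterfall row per frame with the newest spectrum.
// Each row is written twice, bitmapHeight rows apart, so the shader can
// read a full-height window starting at any scroll position without
// wrapping, and the extra row at the end of the buffer records the row
// just written, which the shader uses as the scroll offset.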
let buffer2Pointer = uniform_buffer.contents()
let bytesPerPixel = 4 * MemoryLayout<Float>.size
let array = spectrumArray
let j = frameCount % bitmapHeight // row of the waterfall to overwrite this frame
for i in 0 ..< bitmapWidth {
// grayscale intensity taken from the current spectrum value
var r : Float = 0, g : Float = 0, b : Float = 0
if i < array.count {
let y = array[i]
r = y ; g = y ; b = y
}
var row = bitmapHeight - j
var pixel : [Float] = [ r, g, b, 1.0 ]
let offset0 = (row * bitmapWidth + i) * bytesPerPixel // * 16 bytes
memcpy(buffer2Pointer + offset0, &pixel, bytesPerPixel) // write 16 bytes
row += bitmapHeight
let offset1 = (row * bitmapWidth + i) * bytesPerPixel // * 16 bytes
memcpy(buffer2Pointer + offset1, &pixel, bytesPerPixel) // write 16 bytes
}
var pixel : [Float] = [Float(j), 0.0, 0.0, 0.0]
let top = ((2 * bitmapHeight) * bitmapWidth) * bytesPerPixel
memcpy(buffer2Pointer + top, &pixel, bytesPerPixel)
}
#endif // non SIMULATOR
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
enable = !enable
}
public override func draw(_ rect: CGRect) {
if enable {
computeShader()
frameCount += 1
performSelector(onMainThread: #selector(updateBitmap), with: nil, waitUntilDone: false)
}
}
} // WaterfallView: MTKView
let fftLen = 8 * bitmapWidth
fileprivate var fftSetup : FFTSetup? = nil
fileprivate var auBWindow = [Float](repeating: 1.0, count: 32768)
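// Computes a magnitude spectrum from the most recent audio: step by 2
// through the last 2*fftLen slots of the circular buffer to gather
// fftLen samples, apply a Blackman window (here only to the first
// fftLen/2 samples), run an in-place complex FFT with vDSP, then take
// squared magnitudes of the lower fftLen/2 bins.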
func doFFT_OnAudioBuffer(_ audioObject : RecordAudio) -> ([Float]) {
let log2N = UInt(round(log2f(Float(fftLen))))
var output = [Float](repeating: 0.0, count: fftLen)
if fftSetup == nil {
fftSetup = vDSP_create_fftsetup(log2N, FFTRadix(kFFTRadix2))
vDSP_blkman_window(&auBWindow, vDSP_Length(fftLen), 0)
}
var fcAudioU0 = [Float](repeating: 0.0, count: fftLen)
var fcAudioV0 = [Float](repeating: 0.0, count: fftLen)
var i = audioObject.circInIdx - 2 * fftLen // start 2*fftLen slots behind the newest sample
if i < 0 { i += circBuffSize }
for j in 0 ..< fftLen {
fcAudioU0[j] = audioObject.circBuffer[i]
i += 2 ; if i >= circBuffSize { i -= circBuffSize } // stride 2 through the circular buffer
}
vDSP_vmul(fcAudioU0, 1, auBWindow, 1, &fcAudioU0, 1, vDSP_Length(fftLen/2))
var fcAudioUV = DSPSplitComplex(realp: &fcAudioU0, imagp: &fcAudioV0 )
vDSP_fft_zip(fftSetup!, &fcAudioUV, 1, log2N, Int32(FFT_FORWARD)); // FFT()
var tmpAuSpectrum = [Float](repeating: 0.0, count: fftLen)
vDSP_zvmags(&fcAudioUV, 1, &tmpAuSpectrum, 1, vDSP_Length(fftLen/2)) // abs()
var scale = 1024.0 / Float(fftLen)
vDSP_vsmul(&tmpAuSpectrum, 1, &scale, &output, 1, vDSP_Length(fftLen/2))
return (output)
}
var minx : Float = 1.0e12
var maxx : Float = -1.0e12
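// Maps raw FFT magnitudes into 0...1 display values on a log (dB-like)
// scale, normalized by the running minimum and maximum seen so far,
// so the display adapts to the overall signal level.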
func makeSpectrumFromAudio(_ audioObject: RecordAudio) {
var magnitudeArray = doFFT_OnAudioBuffer(audioObject)
for i in 0 ..< bitmapWidth {
if i < magnitudeArray.count {
var x = (1024.0 + 64.0 * Float(i)) * magnitudeArray[i]
if x > maxx { maxx = x }
if x < minx { minx = x }
var y : Float = 0.0
if (x > minx) {
if (x < 1.0) { x = 1.0 }
let r = (logf(maxx - minx) - logf(1.0)) * 1.0
let u = (logf(x - minx) - logf(1.0))
y = u / r
}
spectrumArray[i] = y
}
}
}
// MARK: -- Audio --
//
// RecordAudio.swift
//
// This is a Swift 4.0 class
// that uses an AVAudioEngine tap-on-bus
// to record microphone audio input samples.
//
// Created by Ronald Nicholson on 10/21/16, rev 2019-06-22
// Copyright © 2017,2019 HotPaw Productions. All rights reserved.
// distribution: BSD 2-clause license
//
import Foundation
import AVFoundation
import AudioUnit
var gTmp0 = 0 // debug
final class RecordAudio: NSObject {
let audioEngine = AVAudioEngine()
var isRecording = false
var circBuffer = [Float](repeating: 0, count: circBuffSize) // for incoming samples
var circInIdx : Int = 0
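// The tap below copies incoming samples into circBuffer, used as a
// single-writer lock-free FIFO: the audio thread writes and then
// advances circInIdx, while the UI thread only reads behind that index.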
func startRecording() {
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setCategory(AVAudioSession.Category.record,
mode: AVAudioSession.Mode.measurement,
options: [])
try audioSession.setPreferredSampleRate(48000.0)
try audioSession.setActive(true)
let inputNode = audioEngine.inputNode
self.audioEngine.stop()
inputNode.reset()
inputNode.removeTap(onBus: 0)
let bus = 0
let recordingFormat : AVAudioFormat = inputNode.inputFormat(forBus: bus)
// let sr = recordingFormat.sampleRate
let cc = recordingFormat.channelCount
let sampleFormat : AVAudioCommonFormat = recordingFormat.commonFormat
if sampleFormat != AVAudioCommonFormat.pcmFormatFloat32 { return }
inputNode.installTap(onBus: bus, bufferSize: 512, format: recordingFormat) {
(buffer, time) in
if let b2 = buffer.floatChannelData?[0] {
var j = self.circInIdx
let m = circBuffSize
// the requested bufferSize is only a hint; use the frame count actually delivered
for i in 0 ..< (Int(buffer.frameLength) * Int(cc)) {
self.circBuffer[j] = b2[i]
j += 1 ; if j >= m { j = 0 }
}
self.circInIdx = j
}
}
try audioEngine.start()
isRecording = true
} catch {
print("RecordAudio startRecording failed: \(error)")
}
}
func stopRecording() {
self.audioEngine.stop()
isRecording = false
}
}
// MARK: -- Old Audio 3 --
//
// RecordAudio.swift
//
// This is a Swift 3.0 class
// that uses the iOS RemoteIO Audio Unit
// to record audio input samples,
// (should be instantiated as a singleton object.)
//
// Created by Ronald Nicholson on 10/21/16.
// Copyright © 2017 HotPaw Productions. All rights reserved.
// BSD 2-clause license
//
import Foundation
import AVFoundation
import AudioUnit
// call setupAudioSessionForRecording() during controlling view load
// call startRecording() to start recording in a later UI call
let circBuffSize = 4 * 32768 // lock-free circular fifo/buffer size
final class RecordAudio3: NSObject {
var audioUnit: AudioUnit? = nil
var sampleRate : Double = 48000.0 // default audio sample rate
var micPermission = false
var sessionActive = false
var isRecording = false
var circBuffer = [Float](repeating: 0, count: circBuffSize) // for incoming samples
var circInIdx : Int = 0
private var hwSRate = 48000.0 // guess of device hardware sample rate
private var micPermissionDispatchToken = 0
private var interrupted = false // for restart from audio interruption notification
func startRecording() {
if isRecording { return }
startAudioSession()
if sessionActive {
startAudioUnit()
}
}
var numberOfChannels: Int = 2
private let outputBus: UInt32 = 0
private let inputBus: UInt32 = 1
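// RemoteIO bus convention: bus 1 is the microphone (input) side,
// bus 0 is the speaker (output) side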
func startAudioUnit() {
var err: OSStatus = noErr
if self.audioUnit == nil {
setupAudioUnit() // setup once
}
guard let au = self.audioUnit
else { return }
err = AudioUnitInitialize(au)
gTmp0 = Int(err)
if err != noErr { return }
err = AudioOutputUnitStart(au) // start
gTmp0 = Int(err)
if err == noErr {
isRecording = true
}
}
func startAudioSession() {
if (sessionActive == false) {
// set and activate Audio Session
do {
let audioSession = AVAudioSession.sharedInstance()
if (micPermission == false) {
if (micPermissionDispatchToken == 0) {
micPermissionDispatchToken = 1
audioSession.requestRecordPermission({(granted: Bool)-> Void in
if granted {
self.micPermission = true
return
// check for this flag and call from UI loop if needed
} else {
gTmp0 += 1
// dispatch in main/UI thread an alert
// informing that mic permission is not switched on
}
})
}
}
if micPermission == false { return }
try audioSession.setCategory(AVAudioSession.Category.record,
mode: AVAudioSession.Mode.measurement,
options: [])
// choose 44100 or 48000 based on hardware rate
var preferredIOBufferDuration = 0.0053 // 5.3 milliseconds = 256 samples
hwSRate = audioSession.sampleRate // get native hardware rate
if hwSRate == 44100.0 { sampleRate = 44100.0 } // set session to hardware rate
if hwSRate == 44100.0 { preferredIOBufferDuration = 0.0058 } // for old devices
let desiredSampleRate = sampleRate
try audioSession.setPreferredSampleRate(desiredSampleRate)
try audioSession.setPreferredIOBufferDuration(preferredIOBufferDuration)
NotificationCenter.default.addObserver(
forName: AVAudioSession.interruptionNotification,
object: nil,
queue: nil,
using: myAudioSessionInterruptionHandler )
try audioSession.setActive(true)
sessionActive = true
// gTmp0 = Int(audioSession.sampleRate)
} catch {
print("audio session setup failed: \(error)")
}
}
}
private func setupAudioUnit() {
var componentDesc: AudioComponentDescription
= AudioComponentDescription(
componentType: OSType(kAudioUnitType_Output),
componentSubType: OSType(kAudioUnitSubType_RemoteIO),
componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
componentFlags: UInt32(0),
componentFlagsMask: UInt32(0) )
var osErr: OSStatus = noErr
let component: AudioComponent! = AudioComponentFindNext(nil, &componentDesc)
var tempAudioUnit: AudioUnit?
osErr = AudioComponentInstanceNew(component, &tempAudioUnit)
self.audioUnit = tempAudioUnit
guard let au = self.audioUnit
else { return }
// Enable I/O for input.
var one_ui32: UInt32 = 1
osErr = AudioUnitSetProperty(au,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
inputBus,
&one_ui32,
UInt32(MemoryLayout<UInt32>.size))
// Set format to 32-bit Floats, linear PCM
let nc = 2 // 2 channel stereo
var streamFormatDesc:AudioStreamBasicDescription = AudioStreamBasicDescription(
mSampleRate: Double(sampleRate),
mFormatID: kAudioFormatLinearPCM,
mFormatFlags: ( kAudioFormatFlagsNativeFloatPacked ),
mBytesPerPacket: UInt32(nc * MemoryLayout<Float>.size),
mFramesPerPacket: 1,
mBytesPerFrame: UInt32(nc * MemoryLayout<Float>.size),
mChannelsPerFrame: UInt32(nc),
mBitsPerChannel: UInt32(8 * (MemoryLayout<Float>.size)),
mReserved: UInt32(0)
)
osErr = AudioUnitSetProperty(au,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input, outputBus,
&streamFormatDesc,
UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
osErr = AudioUnitSetProperty(au,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
inputBus,
&streamFormatDesc,
UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
var inputCallbackStruct
= AURenderCallbackStruct(inputProc: recordingCallback,
inputProcRefCon:
UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()))
osErr = AudioUnitSetProperty(au,
AudioUnitPropertyID(kAudioOutputUnitProperty_SetInputCallback),
AudioUnitScope(kAudioUnitScope_Global),
inputBus,
&inputCallbackStruct,
UInt32(MemoryLayout<AURenderCallbackStruct>.size))
// Ask CoreAudio to allocate buffers for us on render.
// Is this true by default?
osErr = AudioUnitSetProperty(au,
AudioUnitPropertyID(kAudioUnitProperty_ShouldAllocateBuffer),
AudioUnitScope(kAudioUnitScope_Output),
inputBus,
&one_ui32,
UInt32(MemoryLayout<UInt32>.size))
gTmp0 = Int(osErr)
}
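// C-style render callback, invoked by CoreAudio on the realtime audio
// thread; inRefCon carries an unretained reference back to this object,
// and AudioUnitRender() fills bufferList with the newest mic samples.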
let recordingCallback: AURenderCallback = { (
inRefCon,
ioActionFlags,
inTimeStamp,
inBusNumber,
frameCount,
ioData ) -> OSStatus in
let audioObject = unsafeBitCast(inRefCon, to: RecordAudio3.self)
var err: OSStatus = noErr
// set mData to nil, AudioUnitRender() should be allocating buffers
var bufferList = AudioBufferList(
mNumberBuffers: 1,
mBuffers: AudioBuffer(
mNumberChannels: UInt32(2),
mDataByteSize: 16,
mData: nil))
if let au = audioObject.audioUnit {
err = AudioUnitRender(au,
ioActionFlags,
inTimeStamp,
inBusNumber,
frameCount,
&bufferList)
}
audioObject.saveRecordedSamples( inputDataList: &bufferList,
frameCount: UInt32(frameCount) )
return 0
}
func saveRecordedSamples( // save RemoteIO mic input samples
inputDataList : UnsafeMutablePointer<AudioBufferList>,
frameCount : UInt32 )
{
let inputDataPtr = UnsafeMutableAudioBufferListPointer(inputDataList)
let mBuffers : AudioBuffer = inputDataPtr[0]
let count = Int(frameCount)
// from Microphone Input
let bufferPointer = UnsafeMutableRawPointer(mBuffers.mData)
if let bptr = bufferPointer {
let dataArray = bptr.assumingMemoryBound(to: Float.self)
var j = self.circInIdx
let m = circBuffSize
for i in 0..<(count) {
let x = Float(dataArray[i+i ]) // copy left channel sample
let y = Float(dataArray[i+i+1]) // copy right channel sample
circBuffer[j ] = x
circBuffer[j + 1] = y
j += 2 ; if j >= m { j = 0 } // into circular buffer
}
self.circInIdx = j // circular index will always be less than size
}
}
func stopRecording() {
if let au = self.audioUnit {
AudioUnitUninitialize(au)
}
isRecording = false
}
func myAudioSessionInterruptionHandler(notification: Notification) -> Void {
let interruptionDict = notification.userInfo
if let interruptionType = interruptionDict?[AVAudioSessionInterruptionTypeKey] {
let interruptionVal = AVAudioSession.InterruptionType(
rawValue: (interruptionType as AnyObject).uintValue )
if (interruptionVal == AVAudioSession.InterruptionType.began) {
if (isRecording) {
stopRecording()
isRecording = false
let audioSession = AVAudioSession.sharedInstance()
do {
try audioSession.setActive(false)
sessionActive = false
} catch {
}
interrupted = true
}
} else if (interruptionVal == AVAudioSession.InterruptionType.ended) {
if (interrupted) {
// potentially restart here
startRecording()
}
}
}
}
} // end of class RecordAudio3
/*
//
// Shader.metal
//
#include <metal_stdlib>
using namespace metal;
#define bmapWidth (256)
#define bmapHeight (256)
struct Uniforms {
float array[bmapHeight*bmapWidth*4];
};
kernel void compute_func_1(texture2d<half, access::write> output [[texture(0)]],
constant Uniforms &uniforms [[buffer(1)]],
uint2 gid [[thread_position_in_grid]])
{
float w = output.get_width() ;
float h = output.get_height();
int x = float(bmapWidth ) * (gid.x / w);
int y = float(bmapHeight) * (gid.y / h);
int top = bmapWidth * (2 * bmapHeight) * 4;
int row = uniforms.array[top];
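// the first float of the extra row past the double-height bitmap
// holds the most recently written row, used here as the scroll offset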
y += (bmapHeight - row);
int j = (y * bmapWidth + x) * 4; // 4 floats per pixel
half r = uniforms.array[j + 0];
half g = uniforms.array[j + 1];
half b = uniforms.array[j + 2];
output.write(half4(r, g, b, 1.0), gid);
}
*/
// eof