@joelreymont
Forked from hotpaw2/RecordAudio.swift
Created January 18, 2017 16:35

Revisions

  1. @hotpaw2 hotpaw2 revised this gist Oct 23, 2016. 1 changed file with 10 additions and 8 deletions.
    18 changes: 10 additions & 8 deletions RecordAudio.swift
    @@ -232,21 +232,23 @@ final class RecordAudio: NSObject {
      if let interuptionType = interuptionDict?[AVAudioSessionInterruptionTypeKey] {
      let interuptionVal = AVAudioSessionInterruptionType(
      rawValue: (interuptionType as AnyObject).uintValue )
    - if (interuptionVal == AVAudioSessionInterruptionType.began) {
    + if interuptionVal == AVAudioSessionInterruptionType.began {
      // [self beginInterruption];
    - if (isRecording) {
    + if isRecording {
      auAudioUnit.stopHardware()
      isRecording = false
      audioInterrupted = true
      }
    - } else if (interuptionVal == AVAudioSessionInterruptionType.ended) {
    + } else if interuptionVal == AVAudioSessionInterruptionType.ended {
      // [self endInterruption];
    - if (audioInterrupted && audioSessionActive == false) {
    - let audioSession = AVAudioSession.sharedInstance()
    + if audioInterrupted {
      do {
    - try audioSession.setActive(true)
    - audioSessionActive = true
    - if (auAudioUnit.renderResourcesAllocated == false) {
    + if audioSessionActive == false {
    + let audioSession = AVAudioSession.sharedInstance()
    + try audioSession.setActive(true)
    + audioSessionActive = true
    + }
    + if auAudioUnit.renderResourcesAllocated == false {
      try auAudioUnit.allocateRenderResources()
      }
      try auAudioUnit.startHardware()
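
    For reference, the .ended branch as it reads after this revision, with indentation
    restored (the isRecording = true line comes from unchanged context shown in
    revision 3 below):

      } else if interuptionVal == AVAudioSessionInterruptionType.ended {
          if audioInterrupted {
              do {
                  if audioSessionActive == false {
                      let audioSession = AVAudioSession.sharedInstance()
                      try audioSession.setActive(true)
                      audioSessionActive = true
                  }
                  if auAudioUnit.renderResourcesAllocated == false {
                      try auAudioUnit.allocateRenderResources()
                  }
                  try auAudioUnit.startHardware()
                  isRecording = true
              } catch {
                  // placeholder for error handling
              }
          }
      }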
  2. @hotpaw2 hotpaw2 revised this gist Oct 23, 2016. 1 changed file with 1 addition and 8 deletions.
    9 changes: 1 addition & 8 deletions RecordAudio.swift
    @@ -237,18 +237,11 @@ final class RecordAudio: NSObject {
      if (isRecording) {
      auAudioUnit.stopHardware()
      isRecording = false
    - let audioSession = AVAudioSession.sharedInstance()
    - do {
    - try audioSession.setActive(false)
    - audioSessionActive = false
    - } catch {
    - // placeholder for error handling
    - }
      audioInterrupted = true
      }
      } else if (interuptionVal == AVAudioSessionInterruptionType.ended) {
      // [self endInterruption];
    - if (audioInterrupted) {
    + if (audioInterrupted && audioSessionActive == false) {
      let audioSession = AVAudioSession.sharedInstance()
      do {
      try audioSession.setActive(true)
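
    This revision stops deactivating the audio session when an interruption begins
    (the system suspends the session itself), so after it the .began branch reduces
    to the following, with indentation restored:

      if (isRecording) {
          auAudioUnit.stopHardware()
          isRecording = false
          audioInterrupted = true
      }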
  3. @hotpaw2 hotpaw2 revised this gist Oct 21, 2016. 1 changed file with 122 additions and 120 deletions.
    242 changes: 122 additions & 120 deletions RecordAudio.swift
    @@ -1,91 +1,108 @@
      //
    - // ToneOutputUnit.swift
    + // RecordAudio.swift
      //
    - // This is a Swift 2.x class (which should be instantiated as a singleton object)
    - // that can record samples of audio input on iOS devices
    + // This is a Swift 3.0 class
    + // that uses the iOS Audio Unit v3 API and RemoteIO Audio Unit
    + // to record audio input samples,
    + // (should be instantiated as a singleton object.)
      //
    - // Created by Ronald Nicholson rhn@nicholson.com on 5/01/2016.
    - // http://www.nicholson.com/rhn/
    - // Copyright © 2016 Ronald H Nicholson, Jr. All rights reserved.
    + // Created by Ronald Nicholson on 10/21/16.
    + // Copyright © 2016 HotPaw Productions. All rights reserved.
      // BSD 2-clause license
      //

    + import Foundation
    + import AVFoundation
    + import AudioUnit
    +
    + // call setupAudioSessionForRecording() during controlling view load
    + // call startRecording() to start recording in a later UI call
    +
      final class RecordAudio: NSObject {

      var auAudioUnit: AUAudioUnit! = nil

    - var enableRecord = true
    - var sessionActive = false
    - var audioSetupDone = false
    - var running = false
    -
    - var sampleRate : Double = 44100.0 // desired audio sample rate
    + var enableRecording = true
    + var audioSessionActive = false
    + var audioSetupComplete = false
    + var isRecording = false

    - var f0 = 880.0 // default frequency of tone
    - var v0 = 16383.0 // default volume of tone
    + var sampleRate : Double = 48000.0 // desired audio sample rate

      let mBufferSize = 8192 // for Audio Unit AudioBufferList mData buffer

    - let cirBuffSize = 32768 // lock-free circular fifo/buffer size
    - var circBuffer = [Int16](count: 32768, repeatedValue: 0)
    + let circBuffSize = 32768 // lock-free circular fifo/buffer size
    + var circBuffer = [Float](repeating: 0, count: 32768)
      var circInIdx : Int = 0 // sample input index
      var circOutIdx : Int = 0 // sample output index

    - private var micPermission = false
    - private var micPermissionDispatchToken: dispatch_once_t = 0
    - private var interrupted = false // for restart from audio interruption notification
    + var audioLevel : Float = 0.0
    +
    + private var micPermissionRequested = false
    + private var micPermissionGranted = false
    +
    + // for restart from audio interruption notification
    + private var audioInterrupted = false
    +
    + private var renderBlock : AURenderBlock? = nil

      func startRecording() {

    - if running { return }
    -
    - self.enableRecord = true
    + if isRecording { return }

    - if (sessionActive == false) {
    + if audioSessionActive == false {
      // configure and activate Audio Session, this might change the sampleRate
    - setupAudioSession()
    + setupAudioSessionForRecording()
      }

    + guard micPermissionGranted && audioSessionActive else { return }
    +
      let audioFormat = AVAudioFormat(
    - commonFormat: AVAudioCommonFormat.PCMFormatInt16, // short int samples
    - sampleRate: Double(sampleRate),
    - channels:AVAudioChannelCount(2),
    - interleaved: true ) // interleaved stereo
    + commonFormat: AVAudioCommonFormat.pcmFormatInt16, // pcmFormatInt16, pcmFormatFloat32,
    + sampleRate: Double(sampleRate), // 44100.0 48000.0
    + channels:AVAudioChannelCount(2), // 1 or 2
    + interleaved: true ) // true for interleaved stereo

      if (auAudioUnit == nil) {
    - setupRemoteIOAudioUnit(audioFormat)
    + setupRemoteIOAudioUnitForRecord(audioFormat: audioFormat)
      }

    - // not running, so start hardware
    - let renderBlock = auAudioUnit.renderBlock
    + renderBlock = auAudioUnit.renderBlock // returns AURenderBlock()

    - if (enableRecord && micPermission && audioSetupDone && sessionActive) {
    - let pcmBufferSize : UInt32 = UInt32(mBufferSize)
    - let inputBuffer = AVAudioPCMBuffer(
    - PCMFormat: audioFormat, frameCapacity: pcmBufferSize)
    + if ( enableRecording
    + && micPermissionGranted
    + && audioSetupComplete
    + && audioSessionActive
    + && isRecording == false ) {

    + auAudioUnit.isInputEnabled = true
    +
    - auAudioUnit.inputEnabled = true
    - auAudioUnit.inputHandler = { // AUInputHandler?
    - (actionFlags, timestamp, frameCount, inputBusNumber) -> Void in
    + auAudioUnit.outputProvider = { // AURenderPullInputBlock()

    - let err : OSStatus =
    - renderBlock(actionFlags, timestamp,
    - AUAudioFrameCount(frameCount), Int(inputBusNumber),
    - inputBuffer.mutableAudioBufferList, nil)
    + (actionFlags, timestamp, frameCount, inputBusNumber, inputData) -> AUAudioUnitStatus in

    - if err == noErr {
    - // save samples from current input buffer to circular buffer
    - self.copyMicrophoneInputSamples( inputBuffer.mutableAudioBufferList,
    - frameCount: UInt32(frameCount) )
    + if let block = self.renderBlock { // AURenderBlock?
    + let err : OSStatus = block(actionFlags,
    + timestamp,
    + frameCount,
    + 1,
    + inputData,
    + .none)
    + if err == noErr {
    + // save samples from current input buffer to circular buffer
    + self.recordMicrophoneInputSamples(
    + inputDataList: inputData,
    + frameCount: UInt32(frameCount) )
    + }
      }
    + let err2 : AUAudioUnitStatus = noErr
    + return err2
      }

      do {
      circInIdx = 0 // initialize circular buffer pointers
      circOutIdx = 0
      try auAudioUnit.allocateRenderResources()
      try auAudioUnit.startHardware() // equivalent to AudioOutputUnitStart ???
    - running = true
    + isRecording = true

      } catch {
      // placeholder for error handling
    @@ -95,116 +112,99 @@ final class RecordAudio: NSObject {

      func stopRecording() {

    - if (running) {
    + if (isRecording) {
      auAudioUnit.stopHardware()
    - running = false
    + isRecording = false
      }
    - if (sessionActive) {
    + if (audioSessionActive) {
      let audioSession = AVAudioSession.sharedInstance()
      do {
      try audioSession.setActive(false)
      } catch /* let error as NSError */ {
      }
    - sessionActive = false
    + audioSessionActive = false
      }
      }

    - private func copyMicrophoneInputSamples( // process RemoteIO Buffer from mic input
    + private func recordMicrophoneInputSamples( // process RemoteIO Buffer from mic input
      inputDataList : UnsafeMutablePointer<AudioBufferList>,
      frameCount : UInt32 )
      {
      let inputDataPtr = UnsafeMutableAudioBufferListPointer(inputDataList)
      let mBuffers : AudioBuffer = inputDataPtr[0]
      let count = Int(frameCount)

    - // Microphone Input Analysis
    - let data = UnsafePointer<Int16>(mBuffers.mData)
    - let dataArray = UnsafeBufferPointer<Int16>(
    - start:data,
    - count: Int(mBuffers.mDataByteSize)/sizeof(Int16) ) // words
    + let bufferPointer = UnsafeMutableRawPointer(mBuffers.mData)

      var j = self.circInIdx // current circular array input index
    - let n = self.cirBuffSize
    - for i in 0..<(count/2) {
    - self.circBuffer[j ] = dataArray[i+i ] // copy left channel sample
    - self.circBuffer[j + 1] = dataArray[i+i+1] // copy right channel sample
    - j += 2 ; if j >= n { j = 0 } // into circular buffer
    - }
    - OSMemoryBarrier(); // C11 call from libkern/OSAtomic.h
    - self.circInIdx = j // circular index will always be less than size
    - }
    -
    - var measuredMicVol : Float = 0.0
    + let n = self.circBuffSize
    + var audioLevelSum : Float = 0.0
    + if let bptr = bufferPointer?.assumingMemoryBound(to: Int16.self) {
    + for i in 0..<(count/2) {
    + // Save samples in circular buffer for latter processing
    + self.circBuffer[j ] = Float(bptr[i+i ]) // Stereo Left
    + self.circBuffer[j + 1] = Float(bptr[i+i+1]) // Stereo Right
    + j += 2 ; if j >= n { j = 0 } // Circular buffer looping
    + // Microphone Input Analysis
    + let x = Float(bptr[i+i ])
    + let y = Float(bptr[i+i+1])
    + audioLevelSum += x * x + y * y

    - func dataAvailable(enough : Int) -> Bool {
    - let buff = self.circBuffer
    - var idx = self.circOutIdx
    - var d = self.circInIdx - idx
    - // set ttd to always try to consume more data
    - // than can be produced during about 1 measurement timer interval
    - if d < 0 { d = d + self.cirBuffSize }
    - if d >= enough { // enough data in fifo
    - var sum = 0.0
    - for _ in 0..<enough {
    - // read circular buffer and increment circular index
    - let x = Double(buff[idx])
    - idx = idx + 1 ; if idx >= 32768 { idx = 0 }
    - // calculate total energy in buffer
    - sum = sum + (x * x)
      }
    - self.circOutIdx = idx
    - measuredMicVol = sqrt( Float(sum) / Float(enough) ) // scaled volume
    - return(true)
      }
    - return(false)
    + OSMemoryBarrier(); // from libkern/OSAtomic.h
    + self.circInIdx = j // circular index will always be less than size
    + if audioLevelSum > 0.0 && count > 0 {
    + audioLevel = logf(audioLevelSum / Float(count))
    + }
      }

      // set up and activate Audio Session
    - private func setupAudioSession() {
    + func setupAudioSessionForRecording() {
      do {

      let audioSession = AVAudioSession.sharedInstance()

    - if (enableRecord && micPermission == false) {
    - dispatch_once(&micPermissionDispatchToken) {
    + if (micPermissionGranted == false) {
    + if (micPermissionRequested == false) {
    + micPermissionRequested = true
      audioSession.requestRecordPermission({(granted: Bool)-> Void in
      if granted {
    - self.micPermission = true
    + self.micPermissionGranted = true
      self.startRecording()
      return
      } else {
    - self.enableRecord = false
    + self.enableRecording = false
      // dispatch in main/UI thread an alert
      // informing that mic permission is not switched on
      }
      })
      }
    + return
      }

    - if enableRecord {
    + if enableRecording {
      try audioSession.setCategory(AVAudioSessionCategoryRecord)
      }
    - var preferredIOBufferDuration = 0.0058 // 5.8 milliseconds = 256 samples
    - let hwSRate = audioSession.sampleRate // get native hardware rate
    - if hwSRate == 48000.0 { sampleRate = 48000.0 } // fix for iPhone 6s
    - if hwSRate == 48000.0 { preferredIOBufferDuration = 0.0053 }
    - try audioSession.setPreferredSampleRate(sampleRate)
    + let preferredIOBufferDuration = 0.0053 // 5.3 milliseconds = 256 samples
    + try audioSession.setPreferredSampleRate(sampleRate) // at 48000.0
      try audioSession.setPreferredIOBufferDuration(preferredIOBufferDuration)

    - NSNotificationCenter.defaultCenter().addObserverForName(
    - AVAudioSessionInterruptionNotification,
    - object: nil, queue: nil,
    - usingBlock: myAudioSessionInterruptionHandler)
    + NotificationCenter.default.addObserver(
    + forName: NSNotification.Name.AVAudioSessionInterruption,
    + object: nil,
    + queue: nil,
    + using: myAudioSessionInterruptionHandler )

      try audioSession.setActive(true)
    - sessionActive = true
    + audioSessionActive = true
      } catch /* let error as NSError */ {
      // placeholder for error handling
      }
      }

      // find and set up the sample format for the RemoteIO Audio Unit
    - private func setupRemoteIOAudioUnit(audioFormat : AVAudioFormat) {
    + private func setupRemoteIOAudioUnitForRecord(audioFormat : AVAudioFormat) {

      do {
      let audioComponentDescription = AudioComponentDescription(
    @@ -221,48 +221,50 @@ final class RecordAudio: NSObject {
      let bus1 = auAudioUnit.outputBusses[1]

      try bus1.setFormat(audioFormat) // for microphone bus
    - audioSetupDone = true
    + audioSetupComplete = true
      } catch /* let error as NSError */ {
      // placeholder for error handling
      }
      }

    - private func myAudioSessionInterruptionHandler(notification: NSNotification) {
    + private func myAudioSessionInterruptionHandler(notification: Notification) -> Void {
      let interuptionDict = notification.userInfo
      if let interuptionType = interuptionDict?[AVAudioSessionInterruptionTypeKey] {
      let interuptionVal = AVAudioSessionInterruptionType(
    - rawValue: interuptionType.unsignedIntegerValue )
    - if (interuptionVal == AVAudioSessionInterruptionType.Began) {
    + rawValue: (interuptionType as AnyObject).uintValue )
    + if (interuptionVal == AVAudioSessionInterruptionType.began) {
      // [self beginInterruption];
    - if (running) {
    + if (isRecording) {
      auAudioUnit.stopHardware()
    - running = false
    + isRecording = false
      let audioSession = AVAudioSession.sharedInstance()
      do {
      try audioSession.setActive(false)
    - sessionActive = false
    + audioSessionActive = false
      } catch {
      // placeholder for error handling
      }
    - interrupted = true
    + audioInterrupted = true
      }
    - } else if (interuptionVal == AVAudioSessionInterruptionType.Ended) {
    + } else if (interuptionVal == AVAudioSessionInterruptionType.ended) {
      // [self endInterruption];
    - if (interrupted) {
    + if (audioInterrupted) {
      let audioSession = AVAudioSession.sharedInstance()
      do {
      try audioSession.setActive(true)
    - sessionActive = true
    + audioSessionActive = true
      if (auAudioUnit.renderResourcesAllocated == false) {
      try auAudioUnit.allocateRenderResources()
      }
      try auAudioUnit.startHardware()
    - running = true
    + isRecording = true
      } catch {
      // placeholder for error handling
      }
      }
      }
      }
      }
    - }
    + } // end of RecordAudio class
    +
    + // eof
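
    After this rewrite the class exposes the Swift 3 API described in its new header
    comments: call setupAudioSessionForRecording() once while the controlling view
    loads, then startRecording() from a later UI action. (The preferred IO buffer
    duration of 0.0053 s matches its comment: 256 samples at 48000 Hz is
    256 / 48000 ≈ 5.3 ms.) A minimal, hypothetical host view controller under those
    assumptions; the controller and action names are placeholders, not part of the gist:

      import UIKit

      class RecorderViewController: UIViewController {

          let recorder = RecordAudio()  // keep one long-lived (singleton-like) instance

          override func viewDidLoad() {
              super.viewDidLoad()
              recorder.setupAudioSessionForRecording()  // configure + activate the session
          }

          @IBAction func recordTapped(_ sender: Any) {
              recorder.startRecording()                 // start RemoteIO input on user action
          }

          @IBAction func stopTapped(_ sender: Any) {
              recorder.stopRecording()                  // stop hardware, deactivate session
          }
      }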
  4. @hotpaw2 hotpaw2 revised this gist May 2, 2016. 1 changed file with 1 addition and 0 deletions.
    1 change: 1 addition & 0 deletions RecordAudio.swift
    @@ -130,6 +130,7 @@ final class RecordAudio: NSObject {
      self.circBuffer[j + 1] = dataArray[i+i+1] // copy right channel sample
      j += 2 ; if j >= n { j = 0 } // into circular buffer
      }
    + OSMemoryBarrier(); // C11 call from libkern/OSAtomic.h
      self.circInIdx = j // circular index will always be less than size
      }
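
    The barrier added here orders the producer side of the lock-free fifo: samples
    are written into circBuffer first, OSMemoryBarrier() is issued, and only then is
    circInIdx published, so a consumer that reads circInIdx before reading samples
    (as the gist's dataAvailable() does) never sees an index that runs ahead of the
    data. A sketch of that polling pattern, assuming a RecordAudio instance named
    recorder:

      // Poll from a repeating timer; dataAvailable() reads circInIdx, then
      // consumes that many samples and updates measuredMicVol.
      if recorder.dataAvailable(1024) {
          print("measured mic volume: \(recorder.measuredMicVol)")
      }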

  5. @hotpaw2 hotpaw2 created this gist May 2, 2016.
    267 changes: 267 additions & 0 deletions RecordAudio.swift
    @@ -0,0 +1,267 @@
    //
    // ToneOutputUnit.swift
    //
    // This is a Swift 2.x class (which should be instantiated as a singleton object)
    // that can record samples of audio input on iOS devices
    //
    // Created by Ronald Nicholson rhn@nicholson.com on 5/01/2016.
    // http://www.nicholson.com/rhn/
    // Copyright © 2016 Ronald H Nicholson, Jr. All rights reserved.
    // BSD 2-clause license
    //

    final class RecordAudio: NSObject {

    var auAudioUnit: AUAudioUnit! = nil

    var enableRecord = true
    var sessionActive = false
    var audioSetupDone = false
    var running = false

    var sampleRate : Double = 44100.0 // desired audio sample rate

    var f0 = 880.0 // default frequency of tone
    var v0 = 16383.0 // default volume of tone

    let mBufferSize = 8192 // for Audio Unit AudioBufferList mData buffer

    let cirBuffSize = 32768 // lock-free circular fifo/buffer size
    var circBuffer = [Int16](count: 32768, repeatedValue: 0)
    var circInIdx : Int = 0 // sample input index
    var circOutIdx : Int = 0 // sample output index

    private var micPermission = false
    private var micPermissionDispatchToken: dispatch_once_t = 0
    private var interrupted = false // for restart from audio interruption notification

    func startRecording() {

    if running { return }

    self.enableRecord = true

    if (sessionActive == false) {
    // configure and activate Audio Session, this might change the sampleRate
    setupAudioSession()
    }

    let audioFormat = AVAudioFormat(
    commonFormat: AVAudioCommonFormat.PCMFormatInt16, // short int samples
    sampleRate: Double(sampleRate),
    channels:AVAudioChannelCount(2),
    interleaved: true ) // interleaved stereo

    if (auAudioUnit == nil) {
    setupRemoteIOAudioUnit(audioFormat)
    }

    // not running, so start hardware
    let renderBlock = auAudioUnit.renderBlock

    if (enableRecord && micPermission && audioSetupDone && sessionActive) {
    let pcmBufferSize : UInt32 = UInt32(mBufferSize)
    let inputBuffer = AVAudioPCMBuffer(
    PCMFormat: audioFormat, frameCapacity: pcmBufferSize)

    auAudioUnit.inputEnabled = true
    auAudioUnit.inputHandler = { // AUInputHandler?
    (actionFlags, timestamp, frameCount, inputBusNumber) -> Void in

    let err : OSStatus =
    renderBlock(actionFlags, timestamp,
    AUAudioFrameCount(frameCount), Int(inputBusNumber),
    inputBuffer.mutableAudioBufferList, nil)

    if err == noErr {
    // save samples from current input buffer to circular buffer
    self.copyMicrophoneInputSamples( inputBuffer.mutableAudioBufferList,
    frameCount: UInt32(frameCount) )
    }
    }

    do {
    circInIdx = 0 // initialize circular buffer pointers
    circOutIdx = 0
    try auAudioUnit.allocateRenderResources()
    try auAudioUnit.startHardware() // equivalent to AudioOutputUnitStart ???
    running = true

    } catch {
    // placeholder for error handling
    }
    }
    }

    func stopRecording() {

    if (running) {
    auAudioUnit.stopHardware()
    running = false
    }
    if (sessionActive) {
    let audioSession = AVAudioSession.sharedInstance()
    do {
    try audioSession.setActive(false)
    } catch /* let error as NSError */ {
    }
    sessionActive = false
    }
    }

    private func copyMicrophoneInputSamples( // process RemoteIO Buffer from mic input
    inputDataList : UnsafeMutablePointer<AudioBufferList>,
    frameCount : UInt32 )
    {
    let inputDataPtr = UnsafeMutableAudioBufferListPointer(inputDataList)
    let mBuffers : AudioBuffer = inputDataPtr[0]
    let count = Int(frameCount)

    // Microphone Input Analysis
    let data = UnsafePointer<Int16>(mBuffers.mData)
    let dataArray = UnsafeBufferPointer<Int16>(
    start:data,
    count: Int(mBuffers.mDataByteSize)/sizeof(Int16) ) // words

    var j = self.circInIdx // current circular array input index
    let n = self.cirBuffSize
    for i in 0..<(count/2) {
    self.circBuffer[j ] = dataArray[i+i ] // copy left channel sample
    self.circBuffer[j + 1] = dataArray[i+i+1] // copy right channel sample
    j += 2 ; if j >= n { j = 0 } // into circular buffer
    }
    self.circInIdx = j // circular index will always be less than size
    }

    var measuredMicVol : Float = 0.0

    func dataAvailable(enough : Int) -> Bool {
    let buff = self.circBuffer
    var idx = self.circOutIdx
    var d = self.circInIdx - idx
    // set ttd to always try to consume more data
    // than can be produced during about 1 measurement timer interval
    if d < 0 { d = d + self.cirBuffSize }
    if d >= enough { // enough data in fifo
    var sum = 0.0
    for _ in 0..<enough {
    // read circular buffer and increment circular index
    let x = Double(buff[idx])
    idx = idx + 1 ; if idx >= 32768 { idx = 0 }
    // calculate total energy in buffer
    sum = sum + (x * x)
    }
    self.circOutIdx = idx
    measuredMicVol = sqrt( Float(sum) / Float(enough) ) // scaled volume
    return(true)
    }
    return(false)
    }

    // set up and activate Audio Session
    private func setupAudioSession() {
    do {

    let audioSession = AVAudioSession.sharedInstance()

    if (enableRecord && micPermission == false) {
    dispatch_once(&micPermissionDispatchToken) {
    audioSession.requestRecordPermission({(granted: Bool)-> Void in
    if granted {
    self.micPermission = true
    self.startRecording()
    return
    } else {
    self.enableRecord = false
    // dispatch in main/UI thread an alert
    // informing that mic permission is not switched on
    }
    })
    }
    }

    if enableRecord {
    try audioSession.setCategory(AVAudioSessionCategoryRecord)
    }
    var preferredIOBufferDuration = 0.0058 // 5.8 milliseconds = 256 samples
    let hwSRate = audioSession.sampleRate // get native hardware rate
    if hwSRate == 48000.0 { sampleRate = 48000.0 } // fix for iPhone 6s
    if hwSRate == 48000.0 { preferredIOBufferDuration = 0.0053 }
    try audioSession.setPreferredSampleRate(sampleRate)
    try audioSession.setPreferredIOBufferDuration(preferredIOBufferDuration)

    NSNotificationCenter.defaultCenter().addObserverForName(
    AVAudioSessionInterruptionNotification,
    object: nil, queue: nil,
    usingBlock: myAudioSessionInterruptionHandler)

    try audioSession.setActive(true)
    sessionActive = true
    } catch /* let error as NSError */ {
    // placeholder for error handling
    }
    }

    // find and set up the sample format for the RemoteIO Audio Unit
    private func setupRemoteIOAudioUnit(audioFormat : AVAudioFormat) {

    do {
    let audioComponentDescription = AudioComponentDescription(
    componentType: kAudioUnitType_Output,
    componentSubType: kAudioUnitSubType_RemoteIO,
    componentManufacturer: kAudioUnitManufacturer_Apple,
    componentFlags: 0,
    componentFlagsMask: 0 )


    try auAudioUnit = AUAudioUnit(componentDescription: audioComponentDescription)

    // bus 1 is for data that the microphone exports out to the handler block
    let bus1 = auAudioUnit.outputBusses[1]

    try bus1.setFormat(audioFormat) // for microphone bus
    audioSetupDone = true
    } catch /* let error as NSError */ {
    // placeholder for error handling
    }
    }

    private func myAudioSessionInterruptionHandler(notification: NSNotification) {
    let interuptionDict = notification.userInfo
    if let interuptionType = interuptionDict?[AVAudioSessionInterruptionTypeKey] {
    let interuptionVal = AVAudioSessionInterruptionType(
    rawValue: interuptionType.unsignedIntegerValue )
    if (interuptionVal == AVAudioSessionInterruptionType.Began) {
    // [self beginInterruption];
    if (running) {
    auAudioUnit.stopHardware()
    running = false
    let audioSession = AVAudioSession.sharedInstance()
    do {
    try audioSession.setActive(false)
    sessionActive = false
    } catch {
    // placeholder for error handling
    }
    interrupted = true
    }
    } else if (interuptionVal == AVAudioSessionInterruptionType.Ended) {
    // [self endInterruption];
    if (interrupted) {
    let audioSession = AVAudioSession.sharedInstance()
    do {
    try audioSession.setActive(true)
    sessionActive = true
    if (auAudioUnit.renderResourcesAllocated == false) {
    try auAudioUnit.allocateRenderResources()
    }
    try auAudioUnit.startHardware()
    running = true
    } catch {
    // placeholder for error handling
    }
    }
    }
    }
    }
    }
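
  A possible consumer for this original Swift 2.x version, modeled on dataAvailable()
  above. drainSamples() is a hypothetical helper, not part of the gist, shown only to
  illustrate how circInIdx, circOutIdx and cirBuffSize cooperate:

    // Hypothetical Swift 2.x consumer: copy count samples out of the fifo.
    func drainSamples(recorder: RecordAudio, count: Int) -> [Int16]? {
        var d = recorder.circInIdx - recorder.circOutIdx   // samples available
        if d < 0 { d = d + recorder.cirBuffSize }          // handle circular wrap
        if d < count { return nil }                        // not enough data yet
        var out = [Int16]()
        var idx = recorder.circOutIdx
        for _ in 0..<count {
            out.append(recorder.circBuffer[idx])           // read one sample
            idx = idx + 1
            if idx >= recorder.cirBuffSize { idx = 0 }     // wrap the read index
        }
        recorder.circOutIdx = idx                          // publish consumed position
        return out
    }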