This is a quick-and-dirty example of a Swift class (written in Swift 2.x syntax) that can read buffers of microphone input samples on iOS using RemoteIO with the Audio Unit v3 (AUAudioUnit) API.
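For context, a minimal usage sketch (hypothetical caller code; the timer-based polling and the 1024-sample threshold are arbitrary choices, not part of the gist). The full class follows below.

// Hypothetical usage: keep a strong (e.g. singleton) reference to the recorder.
let recorder = RecordAudio()
recorder.startRecording()
// Later, e.g. from a repeating NSTimer callback, poll the lock-free FIFO:
if recorder.dataAvailable(1024) {       // 1024 samples is an arbitrary threshold
    print("mic RMS volume: \(recorder.measuredMicVol)")
}
// When finished:
recorder.stopRecording()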
//
//  RecordAudio.swift
//
//  This is a Swift 2.x class (which should be instantiated as a singleton object)
//  that can record samples of audio input on iOS devices
//
//  Created by Ronald Nicholson [email protected] on 5/01/2016.
//  http://www.nicholson.com/rhn/
//  Copyright © 2016 Ronald H Nicholson, Jr. All rights reserved.
//  BSD 2-clause license
//

import Foundation
import AVFoundation
import AudioUnit
final class RecordAudio: NSObject {

    var auAudioUnit: AUAudioUnit! = nil

    var enableRecord   = true
    var sessionActive  = false
    var audioSetupDone = false
    var running        = false

    var sampleRate : Double = 44100.0    // desired audio sample rate

    var f0 =   880.0    // default frequency of tone (unused leftover from the tone-generator example)
    var v0 = 16383.0    // default volume of tone (unused leftover)

    let mBufferSize = 8192      // for Audio Unit AudioBufferList mData buffer
    let cirBuffSize = 32768     // lock-free circular fifo/buffer size
    var circBuffer  = [Int16](count: 32768, repeatedValue: 0)
    var circInIdx  : Int = 0    // sample input  index
    var circOutIdx : Int = 0    // sample output index

    private var micPermission = false
    private var micPermissionDispatchToken: dispatch_once_t = 0
    private var interrupted = false     // for restart from audio interruption notification
    func startRecording() {
        if running { return }

        self.enableRecord = true

        if (sessionActive == false) {
            // configure and activate Audio Session, this might change the sampleRate
            setupAudioSession()
        }

        let audioFormat = AVAudioFormat(
            commonFormat: AVAudioCommonFormat.PCMFormatInt16,   // short int samples
            sampleRate: Double(sampleRate),
            channels: AVAudioChannelCount(2),
            interleaved: true )                                 // interleaved stereo

        if (auAudioUnit == nil) {
            setupRemoteIOAudioUnit(audioFormat)
        }

        // not running, so start hardware
        let renderBlock = auAudioUnit.renderBlock

        if (enableRecord && micPermission && audioSetupDone && sessionActive) {

            let pcmBufferSize : UInt32 = UInt32(mBufferSize)
            let inputBuffer = AVAudioPCMBuffer(
                PCMFormat: audioFormat, frameCapacity: pcmBufferSize)

            auAudioUnit.inputEnabled = true
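            // Note (added for clarity): the v3 API uses a pull model here. The
            // unit calls this inputHandler when mic samples are ready, and the
            // handler must itself call renderBlock to copy those samples out of
            // the unit into an app-owned AudioBufferList (inputBuffer).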
            auAudioUnit.inputHandler = {    // AUInputHandler?
                (actionFlags, timestamp, frameCount, inputBusNumber) -> Void in
                let err : OSStatus =
                    renderBlock(actionFlags, timestamp,
                                AUAudioFrameCount(frameCount), Int(inputBusNumber),
                                inputBuffer.mutableAudioBufferList, nil)
                if err == noErr {
                    // save samples from current input buffer to circular buffer
                    self.copyMicrophoneInputSamples( inputBuffer.mutableAudioBufferList,
                                                     frameCount: UInt32(frameCount) )
                }
            }

            do {
                circInIdx  = 0  // initialize circular buffer pointers
                circOutIdx = 0
                try auAudioUnit.allocateRenderResources()
                try auAudioUnit.startHardware() // equivalent to AudioOutputUnitStart ???
                running = true
            } catch {
                // placeholder for error handling
            }
        }
    }
    func stopRecording() {
        if (running) {
            auAudioUnit.stopHardware()
            running = false
        }
        if (sessionActive) {
            let audioSession = AVAudioSession.sharedInstance()
            do {
                try audioSession.setActive(false)
            } catch /* let error as NSError */ {
            }
            sessionActive = false
        }
    }
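    // Note (added for clarity): circBuffer is a single-producer/single-consumer
    // lock-free FIFO. Only the audio thread (via copyMicrophoneInputSamples)
    // writes samples and advances circInIdx; only the consumer (dataAvailable)
    // advances circOutIdx. The memory barrier below publishes the sample writes
    // before the index update, so no lock is needed.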
    private func copyMicrophoneInputSamples(    // process RemoteIO Buffer from mic input
        inputDataList : UnsafeMutablePointer<AudioBufferList>,
        frameCount : UInt32 )
    {
        let inputDataPtr = UnsafeMutableAudioBufferListPointer(inputDataList)
        let mBuffers : AudioBuffer = inputDataPtr[0]
        let count = Int(frameCount)

        // Microphone Input Analysis
        let data = UnsafePointer<Int16>(mBuffers.mData)
        let dataArray = UnsafeBufferPointer<Int16>(
            start: data,
            count: Int(mBuffers.mDataByteSize) / sizeof(Int16) )    // 16-bit words

        var j = self.circInIdx          // current circular array input index
        let n = self.cirBuffSize
        for i in 0..<(count/2) {
            self.circBuffer[j    ] = dataArray[i+i  ]   // copy left  channel sample
            self.circBuffer[j + 1] = dataArray[i+i+1]   // copy right channel sample
            j += 2 ; if j >= n { j = 0 }                // into circular buffer
        }
        OSMemoryBarrier()               // memory barrier from libkern/OSAtomic.h
        self.circInIdx = j              // circular index will always be less than size
    }
    var measuredMicVol : Float = 0.0

    func dataAvailable(enough : Int) -> Bool {
        let buff = self.circBuffer
        var idx = self.circOutIdx
        var d = self.circInIdx - idx
        // the caller's `enough` threshold should try to consume more data
        // than can be produced during about 1 measurement timer interval
        if d < 0 { d = d + self.cirBuffSize }
        if d >= enough {    // enough data in fifo
            var sum = 0.0
            for _ in 0..<enough {
                // read circular buffer and increment circular index
                let x = Double(buff[idx])
                idx = idx + 1 ; if idx >= self.cirBuffSize { idx = 0 }
                // accumulate total energy in buffer
                sum = sum + (x * x)
            }
            self.circOutIdx = idx
            measuredMicVol = sqrt( Float(sum) / Float(enough) )     // RMS volume
            return(true)
        }
        return(false)
    }
    // set up and activate Audio Session
    private func setupAudioSession() {
        do {
            let audioSession = AVAudioSession.sharedInstance()

            if (enableRecord && micPermission == false) {
                dispatch_once(&micPermissionDispatchToken) {
                    audioSession.requestRecordPermission({ (granted: Bool) -> Void in
                        if granted {
                            self.micPermission = true
                            self.startRecording()
                            return
                        } else {
                            self.enableRecord = false
                            // dispatch in main/UI thread an alert
                            // informing that mic permission is not switched on
                        }
                    })
                }
            }
            if enableRecord {
                try audioSession.setCategory(AVAudioSessionCategoryRecord)
            }

            var preferredIOBufferDuration = 0.0058  // 5.8 milliseconds = 256 samples at 44100 Hz
            let hwSRate = audioSession.sampleRate   // get native hardware rate
            if hwSRate == 48000.0 { sampleRate = 48000.0 }  // fix for iPhone 6s
            if hwSRate == 48000.0 { preferredIOBufferDuration = 0.0053 }
            try audioSession.setPreferredSampleRate(sampleRate)
            try audioSession.setPreferredIOBufferDuration(preferredIOBufferDuration)

            NSNotificationCenter.defaultCenter().addObserverForName(
                AVAudioSessionInterruptionNotification,
                object: nil, queue: nil,
                usingBlock: myAudioSessionInterruptionHandler)

            try audioSession.setActive(true)
            sessionActive = true
        } catch /* let error as NSError */ {
            // placeholder for error handling
        }
    }
    // find and set up the sample format for the RemoteIO Audio Unit
    private func setupRemoteIOAudioUnit(audioFormat : AVAudioFormat) {
        do {
            let audioComponentDescription = AudioComponentDescription(
                componentType:          kAudioUnitType_Output,
                componentSubType:       kAudioUnitSubType_RemoteIO,
                componentManufacturer:  kAudioUnitManufacturer_Apple,
                componentFlags:         0,
                componentFlagsMask:     0 )

            try auAudioUnit = AUAudioUnit(componentDescription: audioComponentDescription)

            // bus 1 is for data that the microphone exports out to the handler block
            let bus1 = auAudioUnit.outputBusses[1]
            try bus1.setFormat(audioFormat)     // for microphone bus
            audioSetupDone = true
        } catch /* let error as NSError */ {
            // placeholder for error handling
        }
    }
    private func myAudioSessionInterruptionHandler(notification: NSNotification) {
        let interruptionDict = notification.userInfo
        if let interruptionType = interruptionDict?[AVAudioSessionInterruptionTypeKey] {
            let interruptionVal = AVAudioSessionInterruptionType(
                rawValue: interruptionType.unsignedIntegerValue )
            if (interruptionVal == AVAudioSessionInterruptionType.Began) {
                // [self beginInterruption];
                if (running) {
                    auAudioUnit.stopHardware()
                    running = false
                    let audioSession = AVAudioSession.sharedInstance()
                    do {
                        try audioSession.setActive(false)
                        sessionActive = false
                    } catch {
                        // placeholder for error handling
                    }
                    interrupted = true
                }
            } else if (interruptionVal == AVAudioSessionInterruptionType.Ended) {
                // [self endInterruption];
                if (interrupted) {
                    let audioSession = AVAudioSession.sharedInstance()
                    do {
                        try audioSession.setActive(true)
                        sessionActive = true
                        if (auAudioUnit.renderResourcesAllocated == false) {
                            try auAudioUnit.allocateRenderResources()
                        }
                        try auAudioUnit.startHardware()
                        running = true
                    } catch {
                        // placeholder for error handling
                    }
                }
            }
        }
    }
}
Hello, I tried to use your code, but I get a -50 OSStatus on line 83. I successfully get buffers if I add try auAudioUnit.inputBusses[0].setFormat(audioFormat) on line 222, but in that case I hear the recorded sound through the built-in speaker. Can you suggest something?
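For reference, here is the workaround described above as a sketch, with the RemoteIO bus roles spelled out (the remark about the Record category is an assumption, not something the gist author confirms):

// RemoteIO (AUAudioUnit) bus layout:
//   inputBusses[0]   carries app audio into the unit, out to the speaker
//   outputBusses[1]  carries microphone audio out of the unit to the app
// The workaround: also set the sample format on input bus 0.
try auAudioUnit.inputBusses[0].setFormat(audioFormat)
// Assumption: if the mic then becomes audible through the speaker, keeping the
// session category at AVAudioSessionCategoryRecord (which disables playback)
// should silence the output path.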
Hi, I want an implementation that plays through the speaker, the reverse of the above. I have already captured mic data from the phone and converted it to the format my encoder expects. Now I want the speaker to play chunk by chunk, 160 or 320 frames at a time: decode each chunk and add it to a buffer. Which API should I use to feed it to the speaker? Can you help me with this?
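Not an answer from the gist author, but the output direction on the v3 API goes through the unit's outputProvider block (the pull-model mirror of the inputHandler used above). A minimal sketch in the same Swift 2.x-era style, assuming a RemoteIO AUAudioUnit created as in setupRemoteIOAudioUnit() above, and a hypothetical nextDecodedSample() function that returns your decoded Int16 audio:

// Sketch only: nextDecodedSample() is hypothetical; error handling is omitted.
// The session category must allow playback, e.g. AVAudioSessionCategoryPlayAndRecord.
try auAudioUnit.inputBusses[0].setFormat(audioFormat)   // bus 0 feeds the speaker
auAudioUnit.outputEnabled = true
auAudioUnit.outputProvider = {      // AURenderPullInputBlock
    (actionFlags, timestamp, frameCount, inputBusNumber, inputDataList) -> AUAudioUnitStatus in
    let bufferList = UnsafeMutableAudioBufferListPointer(inputDataList)
    for buffer in bufferList {      // fill each buffer RemoteIO pulls from us
        let ptr = UnsafeMutablePointer<Int16>(buffer.mData)
        let sampleCount = Int(buffer.mDataByteSize) / sizeof(Int16)
        for i in 0..<sampleCount {
            ptr[i] = nextDecodedSample()    // pull decoded 160/320-frame chunks here
        }
    }
    return 0    // noErr
}
try auAudioUnit.allocateRenderResources()
try auAudioUnit.startHardware()

A lock-free circular buffer like the one this gist uses for input would work for the other side of nextDecodedSample(): the decoder thread writes decoded chunks in, and the render block above reads samples out.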