Created
August 8, 2013 12:09
-
-
Save icanswiftabit/6184065 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Configures the RemoteIO audio unit for the sending (recording) side:
// enables IO on both buses, applies a 16-bit mono linear-PCM stream format,
// installs recordingCallback as the input callback, and initializes the unit.
// NOTE(review): method name contains a typo ("initalize") — kept as-is since
// renaming would break existing callers.
-(void)initalizeForSender {
    OSStatus status;

    // Describe the RemoteIO unit (an output-type unit that on iOS handles
    // both microphone input and speaker output).
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;                // output-type unit
    desc.componentSubType = kAudioUnitSubType_RemoteIO;        // RemoteIO: in + out
    desc.componentFlags = 0;                                   // must be zero
    desc.componentFlagsMask = 0;                               // must be zero
    desc.componentManufacturer = kAudioUnitManufacturer_Apple; // provider

    // Locate the matching component and instantiate the audio unit.
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);
    [self hasError:status:__FILE__:__LINE__];

    // Enable recording on the input bus (bus 1).
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));
    [self hasError:status:__FILE__:__LINE__];

    // Enable playback on the output bus (bus 0).
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  kOutputBus,
                                  &flag,
                                  sizeof(flag));
    [self hasError:status:__FILE__:__LINE__];

    // Uncompressed linear PCM so we can work on raw samples:
    // 16-bit signed integer, packed, mono, one frame per packet
    // (2 bytes per frame and per packet) at SAMPLE_RATE.
    // Zero-initialize so the reserved field is not passed uninitialized
    // (FIX: the original left mReserved as uninitialized stack memory).
    AudioStreamBasicDescription audioFormat = {0};
    audioFormat.mSampleRate = SAMPLE_RATE;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerPacket = 2;
    audioFormat.mBytesPerFrame = 2;

    // Format on the OUTPUT scope of the INPUT bus: the format in which
    // captured mic data is delivered to our side of the unit.
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    [self hasError:status:__FILE__:__LINE__];

    // Format on the INPUT scope of the OUTPUT bus: the format of the data
    // we supply for playback.
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    [self hasError:status:__FILE__:__LINE__];

    // Install recordingCallback as the input callback. inputProcRefCon
    // carries self so the C callback can reach this object.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = recordingCallback;
    callbackStruct.inputProcRefCon = (__bridge void *)(self);
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  kInputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    [self hasError:status:__FILE__:__LINE__];

    // flag == 0: tell the unit NOT to allocate its own buffer for the input
    // bus — we provide our own (audioBuffer) and write into it directly.
    // (The original comment claimed the opposite.)
    flag = 0;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_ShouldAllocateBuffer,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));
    // FIX: this status was previously the only one left unchecked.
    [self hasError:status:__FILE__:__LINE__];

    // Mono buffer of 512 16-bit samples (1024 bytes).
    // NOTE(review): this malloc is never freed in this snippet — confirm the
    // buffer is released on teardown (e.g. dealloc/stop) elsewhere.
    audioBuffer.mNumberChannels = 1;
    audioBuffer.mDataByteSize = 512 * 2;
    audioBuffer.mData = malloc( 512 * 2 );

    // Initialize the Audio Unit and cross fingers =)
    status = AudioUnitInitialize(audioUnit);
    [self hasError:status:__FILE__:__LINE__];
    NSLog(@"Started for Sender");
}
// Configures the RemoteIO audio unit for the receiving (playback) side:
// enables IO on both buses, applies a 16-bit mono linear-PCM stream format,
// installs playbackCallback as the render callback on the output bus, and
// initializes the unit.
// NOTE(review): method name contains typos ("initalize", "Reciver") — kept
// as-is since renaming would break existing callers.
-(void)initalizeForReciver{
    OSStatus status;

    // Describe the RemoteIO unit (an output-type unit that on iOS handles
    // both microphone input and speaker output).
    AudioComponentDescription desc;
    desc.componentType = kAudioUnitType_Output;                // output-type unit
    desc.componentSubType = kAudioUnitSubType_RemoteIO;        // RemoteIO: in + out
    desc.componentFlags = 0;                                   // must be zero
    desc.componentFlagsMask = 0;                               // must be zero
    desc.componentManufacturer = kAudioUnitManufacturer_Apple; // provider

    // Locate the matching component and instantiate the audio unit.
    AudioComponent inputComponent = AudioComponentFindNext(NULL, &desc);
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);
    [self hasError:status:__FILE__:__LINE__];

    // Enable recording on the input bus (bus 1).
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));
    [self hasError:status:__FILE__:__LINE__];

    // Enable playback on the output bus (bus 0).
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  kOutputBus,
                                  &flag,
                                  sizeof(flag));
    [self hasError:status:__FILE__:__LINE__];

    // Uncompressed linear PCM so we can work on raw samples:
    // 16-bit signed integer, packed, mono, one frame per packet
    // (2 bytes per frame and per packet) at SAMPLE_RATE.
    // Zero-initialize so the reserved field is not passed uninitialized
    // (FIX: the original left mReserved as uninitialized stack memory).
    AudioStreamBasicDescription audioFormat = {0};
    audioFormat.mSampleRate = SAMPLE_RATE;
    audioFormat.mFormatID = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel = 16;
    audioFormat.mBytesPerPacket = 2;
    audioFormat.mBytesPerFrame = 2;

    // Format on the OUTPUT scope of the INPUT bus: the format in which
    // captured mic data is delivered to our side of the unit.
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    [self hasError:status:__FILE__:__LINE__];

    // Format on the INPUT scope of the OUTPUT bus: the format of the data
    // we supply for playback.
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  sizeof(audioFormat));
    [self hasError:status:__FILE__:__LINE__];

    // Install playbackCallback as the render callback on the output bus.
    // inputProcRefCon carries self so the C callback can reach this object.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = playbackCallback;
    callbackStruct.inputProcRefCon = (__bridge void *)(self);
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Global,
                                  kOutputBus,
                                  &callbackStruct,
                                  sizeof(callbackStruct));
    [self hasError:status:__FILE__:__LINE__];

    // flag == 0: tell the unit NOT to allocate its own buffer for the input
    // bus — we provide our own (audioBuffer) and write into it directly.
    // (The original comment claimed the opposite.)
    flag = 0;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_ShouldAllocateBuffer,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &flag,
                                  sizeof(flag));
    // FIX: this status was previously the only one left unchecked.
    [self hasError:status:__FILE__:__LINE__];

    // Mono buffer of 512 16-bit samples (1024 bytes).
    // NOTE(review): this malloc is never freed in this snippet — confirm the
    // buffer is released on teardown (e.g. dealloc/stop) elsewhere.
    audioBuffer.mNumberChannels = 1;
    audioBuffer.mDataByteSize = 512 * 2;
    audioBuffer.mData = malloc( 512 * 2 );

    // Initialize the Audio Unit and cross fingers =)
    status = AudioUnitInitialize(audioUnit);
    [self hasError:status:__FILE__:__LINE__];
    NSLog(@"Started for Reciver");
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment