// Interruption began (e.g. incoming phone call): tear the output unit all the
// way down — stop rendering, uninitialize, dispose the instance — and give up
// the audio session. NOTE(review): this fully destroys the unit instead of
// merely stopping it; the end-interruption arm below must rebuild it from
// scratch. A stop/uninitialize without dispose would be cheaper — confirm the
// dispose is intentional.
// NOTE(review): every call here returns an OSStatus that is ignored; consider
// at least logging failures.
case kAudioSessionBeginInterruption:
AudioOutputUnitStop( aUnit );
AudioUnitUninitialize( aUnit );
AudioComponentInstanceDispose( aUnit );
AudioSessionSetActive( false );
// NOTE(review): AudioUnit is a plain C pointer type, not an ObjC object;
// NULL would be the conventional sentinel here rather than nil.
aUnit = nil;
break;
// Interruption ended: reactivate the session and rebuild the RemoteIO output
// unit from scratch (find component, instantiate, attach render callback, set
// the stream format, initialize, start).
case kAudioSessionEndInterruption:
AudioSessionSetActive( true );
// Describe the default output unit: RemoteIO (hardware I/O) from Apple.
AudioComponentDescription defaultOutputDescription;
defaultOutputDescription.componentType = kAudioUnitType_Output;
defaultOutputDescription.componentSubType = kAudioUnitSubType_RemoteIO;
defaultOutputDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
defaultOutputDescription.componentFlags = 0;
defaultOutputDescription.componentFlagsMask = 0;
// NOTE(review): AudioComponentFindNext can return NULL; the result is passed
// to AudioComponentInstanceNew unchecked — verify this can't fail here.
AudioComponent defaultOutput = AudioComponentFindNext( NULL, &defaultOutputDescription );
AudioComponentInstanceNew( defaultOutput, &aUnit );
// Hook our render callback into the unit's input scope, bus 0. The refCon is
// self, so RenderAudio presumably reads state back off this object — confirm
// RenderAudio's contract elsewhere in the file.
AURenderCallbackStruct input;
input.inputProc = RenderAudio;
input.inputProcRefCon = self;
AudioUnitSetProperty( aUnit, kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input, 0, &input, sizeof(input) );
int bytes_per_float = sizeof (Float32);
int bits_per_byte = 8;
// Stream format: 8 kHz, mono, native-endian packed Float32, non-interleaved,
// one frame per packet (standard for LPCM).
AudioStreamBasicDescription streamFormat;
streamFormat.mSampleRate = 8000;
streamFormat.mFormatID = kAudioFormatLinearPCM;
streamFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
streamFormat.mFramesPerPacket = 1;
streamFormat.mChannelsPerFrame = 1;
streamFormat.mBytesPerFrame = streamFormat.mChannelsPerFrame * bytes_per_float;
streamFormat.mBytesPerPacket = streamFormat.mBytesPerFrame;
streamFormat.mBitsPerChannel = bytes_per_float * bits_per_byte;
streamFormat.mReserved = 0;
AudioUnitSetProperty ( aUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,
0, &streamFormat, sizeof(AudioStreamBasicDescription) );
// Restart the volume ramp from silence; at 2.0 / sampleRate per frame the
// ramp reaches full volume in half a second of audio.
rampUpVolume = 0.0;
rampUpVolumeRate = 2.0 / streamFormat.mSampleRate;
// NOTE(review): initializing/starting an audio unit while the app is
// backgrounded fails unless the session was successfully reactivated and the
// app has the background-audio entitlement; AudioSessionSetActive(true) above
// is also unchecked and is the likely first failure point — check its
// OSStatus and retry/defer the rebuild until the app is active.
AudioUnitInitialize( aUnit ); // <--- FAILS ONLY IN BACKGROUND
AudioOutputUnitStart( aUnit );
break;
}