I'm working on a music app that reads and plays music scores. Playback uses a soundbank file whose samples are recorded at 16 kHz. On iOS 7, playback comes out wrong: too high-pitched and too fast, as if the 16 kHz sample rate were simply ignored.
I'm aware that the "Inter-App Audio" feature introduced in iOS 7 may have changed a lot of things, but I couldn't find an explanation or a solution in Apple's documentation.
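In case it's relevant, I assume the rate the audio session actually runs at can be checked (and a 16 kHz preference requested) with something like the untested sketch below; logSessionSampleRate is just a hypothetical helper I would call before building the graph:

#import <AVFoundation/AVFoundation.h>

// Untested sketch: request a 16 kHz session rate and log what the system actually uses.
// iOS may ignore the preferred value, so session.sampleRate is the number that counts.
- (void)logSessionSampleRate
{
    AVAudioSession *session = [AVAudioSession sharedInstance];
    NSError *error = nil;
    [session setCategory:AVAudioSessionCategoryPlayback error:&error];
    [session setPreferredSampleRate:16000.0 error:&error];
    [session setActive:YES error:&error];
    NSLog(@"preferred rate: %f, actual rate: %f", session.preferredSampleRate, session.sampleRate);
}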
Thanks in advance.

Here is my AUGraph setup code:
- (void)initAUGraph
{
    if (auGraph) [self releaseAUGraph];
    NewAUGraph(&auGraph);

    // Remote I/O output unit
    AudioComponentDescription iOUnitDescription;
    iOUnitDescription.componentType         = kAudioUnitType_Output;
    iOUnitDescription.componentSubType      = kAudioUnitSubType_RemoteIO;
    iOUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
    iOUnitDescription.componentFlags        = 0;
    iOUnitDescription.componentFlagsMask    = 0;

    // Multichannel mixer unit
    AudioComponentDescription mixerUnitDescription;
    mixerUnitDescription.componentType         = kAudioUnitType_Mixer;
    mixerUnitDescription.componentSubType      = kAudioUnitSubType_MultiChannelMixer;
    mixerUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
    mixerUnitDescription.componentFlags        = 0;
    mixerUnitDescription.componentFlagsMask    = 0;

    // Build the graph: mixer -> Remote I/O
    AUNode ioNode;
    AUNode mixerNode;
    OSStatus err;
    err = AUGraphAddNode(auGraph, &iOUnitDescription, &ioNode);       if (err) NSLog(@"AUGraphAddNode io %d", (int)err);
    err = AUGraphAddNode(auGraph, &mixerUnitDescription, &mixerNode); if (err) NSLog(@"AUGraphAddNode mixer %d", (int)err);
    err = AUGraphConnectNodeInput(auGraph, mixerNode, 0, ioNode, 0);  if (err) NSLog(@"AUGraphConnectNodeInput mixer->io %d", (int)err);
    err = AUGraphOpen(auGraph);                                       if (err) NSLog(@"AUGraphOpen %d", (int)err);
    err = AUGraphNodeInfo(auGraph, mixerNode, NULL, &mixerUnit);      if (err) NSLog(@"AUGraphNodeInfo mixer %d", (int)err);

    // Mono, 32-bit canonical (8.24 fixed-point) linear PCM at SAMPLE_RATE (16 kHz)
    outputFormat.mSampleRate       = SAMPLE_RATE;
    outputFormat.mFormatID         = kAudioFormatLinearPCM;
    outputFormat.mFormatFlags      = kAudioFormatFlagsAudioUnitCanonical;
    outputFormat.mFramesPerPacket  = 1;
    UInt32 sampleSize              = (UInt32)sizeof(AudioUnitSampleType);
    outputFormat.mBytesPerFrame    = sampleSize;
    outputFormat.mChannelsPerFrame = 1;
    outputFormat.mBitsPerChannel   = 32;
    outputFormat.mBytesPerPacket   = outputFormat.mBytesPerFrame;
    outputFormat.mReserved         = 0;

    // Apply the format and sample rate to the mixer's output scope
    err = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &outputFormat, sizeof(outputFormat));
    if (err) NSLog(@"set StreamFormat %d", (int)err);
    Float64 sampleRate = SAMPLE_RATE;
    err = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_SampleRate, kAudioUnitScope_Output, 0, &sampleRate, sizeof(Float64));
    if (err) NSLog(@"set SampleRate %d", (int)err);
    UInt32 frameCount = 4096;
    err = AudioUnitSetProperty(mixerUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Output, 0, &frameCount, sizeof(UInt32));
    if (err) NSLog(@"set MaximumFramesPerSlice %d", (int)err);

    // Render callback feeding the mixer's input bus 0
    AURenderCallbackStruct cbStruct = { callback, self };
    err = AUGraphSetNodeInputCallback(auGraph, mixerNode, 0, &cbStruct); if (err) NSLog(@"AUGraphSetNodeInputCallback %d", (int)err);
    err = AUGraphInitialize(auGraph);                                    if (err) NSLog(@"AUGraphInitialize %d", (int)err);

    [self printASBD:outputFormat];
}
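For debugging, I assume the format that is actually in effect after AUGraphInitialize can be read back with something like this (untested sketch; appliedFormat is just a local variable for inspection), to see whether the 16 kHz setting survived:

// Untested sketch: read back the mixer's output format after the graph is initialized.
AudioStreamBasicDescription appliedFormat;
UInt32 size = sizeof(appliedFormat);
OSStatus status = AudioUnitGetProperty(mixerUnit,
                                       kAudioUnitProperty_StreamFormat,
                                       kAudioUnitScope_Output,
                                       0,
                                       &appliedFormat,
                                       &size);
if (status == noErr) {
    NSLog(@"mixer output sample rate after init: %f", appliedFormat.mSampleRate);
}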