Thanks for your reply. Here is the stream format I set on my audio units:
AudioStreamBasicDescription asbd;
memset(&asbd,0,sizeof(asbd));
asbd.mSampleRate = [[AVAudioSession sharedInstance] sampleRate];
asbd.mFormatID = kAudioFormatLinearPCM;
asbd.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved;
asbd.mChannelsPerFrame = 2; // STEREO
asbd.mFramesPerPacket = 1;
asbd.mReserved = 0;
asbd.mBitsPerChannel = sizeof(float) * 8;
asbd.mBytesPerFrame = sizeof(float);
asbd.mBytesPerPacket = sizeof(float);
checkResult(AudioUnitSetProperty(auRemoteIO, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, sizeof(asbd)), "Error setting remoteIO StreamFormat");
checkResult(AudioUnitSetProperty(auMainMixer, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, sizeof(asbd)), "Error setting main mixer StreamFormat");
checkResult(AudioUnitSetProperty(auMaster, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, sizeof(asbd)), "Error setting master StreamFormat");
checkResult(AudioUnitSetProperty(auMixer, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, sizeof(asbd)), "Error setting mixer StreamFormat");
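To see which format each unit actually ends up with, the format can be read back after it is set; a quick sketch reusing the same checkResult helper:

AudioStreamBasicDescription actualFormat;
UInt32 actualFormatSize = sizeof(actualFormat);
memset(&actualFormat, 0, sizeof(actualFormat));
checkResult(AudioUnitGetProperty(auRemoteIO, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &actualFormat, &actualFormatSize), "Error reading back remoteIO StreamFormat");
// print_asbd(actualFormat); // compare mFormatFlags / mBitsPerChannel between iOS 7 and iOS 8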
My audio files are mono CAF files. I load them dynamically into buffers before using them (the files are short, 5 seconds at most) and read the buffers in a render callback.
I ran a test where, instead of loading a CAF file, I fill the buffers with a sine waveform, and I get the same result: it sounds good on iOS 8 but saturated on iOS 7.
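The sine test just overwrites the loaded samples with a generated tone, along these lines (a sketch assuming leftChannel holds Float32 samples as the ASBD above declares; 440 Hz and the 0.5 amplitude are arbitrary, and it needs <math.h>):

double sr = [[AVAudioSession sharedInstance] sampleRate];
for (UInt64 i = 0; i < frameCount; i++) {
    // Generate a 440 Hz sine at half amplitude in place of the file data.
    leftChannel[i] = (Float32)(0.5 * sin(2.0 * M_PI * 440.0 * (double)i / sr));
}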
My files are mono, but the ASBD I use is stereo (I use some stereo effects). I read the mono file's content and copy it to both output buffers (ioData->mBuffers[1].mData = ioData->mBuffers[0].mData = audioData[…]).
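Roughly, the render callback does something like this (a simplified sketch, not my exact code; SoundState and readPosition are placeholder names for my actual bookkeeping):

typedef struct {
    Float32 *leftChannel;  // mono samples filled in by the load: method below
    UInt64   frameCount;
    UInt64   readPosition;
} SoundState; // placeholder state passed in via inRefCon

static OSStatus renderCallback(void *inRefCon,
                               AudioUnitRenderActionFlags *ioActionFlags,
                               const AudioTimeStamp *inTimeStamp,
                               UInt32 inBusNumber,
                               UInt32 inNumberFrames,
                               AudioBufferList *ioData)
{
    SoundState *sound = (SoundState *)inRefCon;
    Float32 *left  = (Float32 *)ioData->mBuffers[0].mData;
    Float32 *right = (Float32 *)ioData->mBuffers[1].mData;
    for (UInt32 i = 0; i < inNumberFrames; i++) {
        // Duplicate each mono sample into both non-interleaved output channels,
        // padding with silence once the file runs out.
        Float32 sample = (sound->readPosition < sound->frameCount)
                       ? sound->leftChannel[sound->readPosition++]
                       : 0.0f;
        left[i]  = sample;
        right[i] = sample;
    }
    return noErr;
}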
Thanks.

Here is the loading code:
- (void)load:(NSURL *)fileNameURL {
    if (leftChannel != NULL) {
        free(leftChannel);
        leftChannel = NULL;
    }
    if (rightChannel != NULL) {
        free(rightChannel);
        rightChannel = NULL;
    }
    soundFileURLRef = (__bridge CFURLRef)fileNameURL; // plain (CFURLRef) cast if the project is not using ARC
    //----------------------------------------------
    // 1.[OPEN AUDIO FILE] and associate it with the extended audio file object.
    //----------------------------------------------
    ExtAudioFileRef audioFileExtendedObject = 0;
    log_if_err(ExtAudioFileOpenURL(soundFileURLRef,
                                   &audioFileExtendedObject),
               @"ExtAudioFileOpenURL failed");
    //----------------------------------------------
    // 2.[AUDIO FILE LENGTH] Get the audio file's length in frames.
    //----------------------------------------------
    UInt64 totalFramesInFile = 4000; // placeholder; overwritten by the property call below
    UInt32 frameLengthPropertySize = sizeof(totalFramesInFile);
    log_if_err(ExtAudioFileGetProperty(audioFileExtendedObject,
                                       kExtAudioFileProperty_FileLengthFrames,
                                       &frameLengthPropertySize,
                                       &totalFramesInFile),
               @"ExtAudioFileGetProperty (audio file length in frames) failed");
    frameCount = totalFramesInFile;
    //----------------------------------------------
    // 3.[AUDIO FILE FORMAT] Get the audio file's data format (normally CAF).
    //----------------------------------------------
    AudioStreamBasicDescription fileAudioFormat = {0};
    UInt32 formatPropertySize = sizeof(fileAudioFormat);
    log_if_err(ExtAudioFileGetProperty(audioFileExtendedObject,
                                       kExtAudioFileProperty_FileDataFormat,
                                       &formatPropertySize,
                                       &fileAudioFormat),
               @"ExtAudioFileGetProperty (file audio format) failed");
    //----------------------------------------------
    // 4.[ALLOCATE AUDIO FILE MEMORY] Allocate memory in the soundFiles instance
    // variable to hold the left channel, or mono, audio data
    //----------------------------------------------
    UInt32 channelCount = fileAudioFormat.mChannelsPerFrame;
    // DLog(@"fileNameURL=%@ | channelCount=%d",fileNameURL,(int)channelCount);
    leftChannel = calloc(totalFramesInFile, SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE);
    AudioStreamBasicDescription importFormat = {0};
    if (2 == channelCount) {
        isStereo = YES;
        if (rightChannel != NULL) {
            free(rightChannel);
            rightChannel = NULL;
        }
        rightChannel = calloc(totalFramesInFile, SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE);
        importFormat = stereoStreamFormat;
    } else if (1 == channelCount) {
        isStereo = NO;
        importFormat = monoStreamFormat;
    } else {
        // Unsupported channel count: close the file and bail out.
        ExtAudioFileDispose(audioFileExtendedObject);
        return;
    }
    //----------------------------------------------
    // 5.[ASSIGN THE MIXER INPUT BUS STREAM DATA FORMAT TO THE AUDIO FILE]
    //----------------------------------------------
    UInt32 importFormatPropertySize = (UInt32)sizeof(importFormat);
    log_if_err(ExtAudioFileSetProperty(audioFileExtendedObject,
                                       kExtAudioFileProperty_ClientDataFormat,
                                       importFormatPropertySize,
                                       &importFormat),
               @"ExtAudioFileSetProperty (client data format) failed");
    //----------------------------------------------
    // 6.[SET THE AUDIOBUFFER LIST STRUCT] which has two roles:
    //
    // 1. It gives the ExtAudioFileRead function the configuration it
    // needs to correctly provide the data to the buffer.
    //
    // 2. It points to the soundFiles[soundFile].leftChannel buffer, so
    // that audio data obtained from disk using the ExtAudioFileRead function
    // goes to that buffer
    //
    // Allocate memory for the buffer list struct according to the number of
    // channels it represents.
    //----------------------------------------------
    AudioBufferList *bufferList;
    bufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList) + sizeof(AudioBuffer) * (channelCount - 1));
    if (NULL == bufferList) {
        NSLog(@"*** malloc failure for allocating bufferList memory");
        ExtAudioFileDispose(audioFileExtendedObject); // don't leak the file handle on the early return
        return;
    }
    //----------------------------------------------
    // 7.initialize the mNumberBuffers member
    //----------------------------------------------
    bufferList->mNumberBuffers = channelCount;
    //----------------------------------------------
    // 8.initialize the mBuffers member to 0
    //----------------------------------------------
    AudioBuffer emptyBuffer = {0};
    size_t arrayIndex;
    for (arrayIndex = 0; arrayIndex < channelCount; arrayIndex++) {
        bufferList->mBuffers[arrayIndex] = emptyBuffer;
    }
    //----------------------------------------------
    // 9.set up the AudioBuffer structs in the buffer list
    //----------------------------------------------
    bufferList->mBuffers[0].mNumberChannels = 1;
    bufferList->mBuffers[0].mDataByteSize = (UInt32)(totalFramesInFile * SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE);
    bufferList->mBuffers[0].mData = leftChannel;
    if (channelCount == 2) {
        bufferList->mBuffers[1].mNumberChannels = 1;
        bufferList->mBuffers[1].mDataByteSize = (UInt32)(totalFramesInFile * SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE);
        bufferList->mBuffers[1].mData = rightChannel;
    }
    //----------------------------------------------
    // 10.Perform a synchronous, sequential read of the audio data out of the file and
    // into the "soundFiles[soundFile].leftChannel" and (if stereo) ".rightChannel" members.
    //----------------------------------------------
    UInt32 numberOfPacketsToRead = (UInt32)totalFramesInFile; // for linear PCM, 1 frame == 1 packet
    OSStatus result = ExtAudioFileRead(audioFileExtendedObject,
                                       &numberOfPacketsToRead,
                                       bufferList);
    free(bufferList);
    if (noErr != result) {
        log_if_err(result, @"ExtAudioFileRead failure");
        //
        // If reading from the file failed, then free the memory for the sound buffer.
        //
        free(leftChannel);
        leftChannel = NULL;
        if (2 == channelCount) {
            free(rightChannel);
            rightChannel = NULL;
        }
        frameCount = 0;
    }
    //----------------------------------------------
    // Dispose of the extended audio file object, which also
    // closes the associated file.
    //----------------------------------------------
    ExtAudioFileDispose(audioFileExtendedObject);
    return;
}
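For what it's worth, load: is called with a plain bundle URL; the resource name here is just an example:

[self load:[[NSBundle mainBundle] URLForResource:@"sound1" withExtension:@"caf"]]; // "sound1.caf" is a hypothetical file name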
- (void)initStereoStreamFormat {
    size_t bytesPerSample = SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE;
    stereoStreamFormat.mSampleRate = [[AVAudioSession sharedInstance] sampleRate];
    stereoStreamFormat.mFormatID = kAudioFormatLinearPCM;
    stereoStreamFormat.mFormatFlags = AUDIO_FORMAT_FLAGS;
    stereoStreamFormat.mReserved = 0;
    stereoStreamFormat.mFramesPerPacket = 1;
    stereoStreamFormat.mChannelsPerFrame = 2;
    stereoStreamFormat.mBitsPerChannel = (UInt32)(8 * bytesPerSample);
    stereoStreamFormat.mBytesPerFrame = (UInt32)bytesPerSample;
    stereoStreamFormat.mBytesPerPacket = (UInt32)bytesPerSample;
}
- (void)initMonoStreamFormat {
    size_t bytesPerSample = SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE;
    monoStreamFormat.mSampleRate = [[AVAudioSession sharedInstance] sampleRate];
    monoStreamFormat.mFormatID = kAudioFormatLinearPCM;
    monoStreamFormat.mFormatFlags = AUDIO_FORMAT_FLAGS;
    monoStreamFormat.mChannelsPerFrame = 1;
    monoStreamFormat.mFramesPerPacket = 1;
    monoStreamFormat.mReserved = 0;
    monoStreamFormat.mBitsPerChannel = (UInt32)(8 * bytesPerSample);
    monoStreamFormat.mBytesPerFrame = (UInt32)bytesPerSample;
    monoStreamFormat.mBytesPerPacket = (UInt32)bytesPerSample;
    //print_asbd(monoStreamFormat);
}
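For readers: SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE and AUDIO_FORMAT_FLAGS are project macros not shown here; definitions along the following lines would be consistent with the float ASBD above (a sketch, not the actual header):

#define SIZE_OF_AUDIO_UNIT_SAMPLE_TYPE sizeof(Float32) // assumed: 4-byte float samples
#define AUDIO_FORMAT_FLAGS (kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved) // assumed: matches the asbd above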