
Re: from xcode5 to xcode6-beta6, sounds are now distorted on iOS7


  • Subject: Re: from xcode5 to xcode6-beta6, sounds are now distorted on iOS7
  • From: Pascal <email@hidden>
  • Date: Mon, 01 Sep 2014 17:38:16 +0200

Hi Dave,

I had no luck following your lead. 

I have tried to reduce my app to a minimal amount of code, but I am still having the issue on iOS 7.

The reduced code:

extern NSInteger const kASF = kAudioUnitProperty_StreamFormat;
extern NSInteger const kASI = kAudioUnitScope_Input;
extern NSInteger const kASO = kAudioUnitScope_Output;
extern NSInteger const kAEC = kAudioUnitProperty_ElementCount;
extern NSInteger const kMFPS= kAudioUnitProperty_MaximumFramesPerSlice;
extern NSInteger const kAG  = kAudioUnitScope_Global;
extern NSInteger const kSR  = kAudioUnitProperty_SampleRate;

-(bool) setup{
    //----------------------------------------------
    // AudioComponentDescription
    //----------------------------------------------
    AudioComponentDescription descRemote = { kAudioUnitType_Output, kAudioUnitSubType_RemoteIO,          kAudioUnitManufacturer_Apple, 0, 0 };
    AudioComponentDescription descMixer  = { kAudioUnitType_Mixer,  kAudioUnitSubType_MultiChannelMixer, kAudioUnitManufacturer_Apple, 0, 0 };
    //----------------------------------------------
    // AudioUnits
    //----------------------------------------------
    AudioComponent componentRemote = AudioComponentFindNext(NULL, &descRemote);
    checkRes(AudioComponentInstanceNew(componentRemote, &auRemoteIO), "AudioComponentInstanceNew RemoteIO");
    AudioComponent componentMixer = AudioComponentFindNext(NULL, &descMixer);
    checkRes(AudioComponentInstanceNew(componentMixer, &auMixer), "AudioComponentInstanceNew Mixer");
    //------------------------------------------------------
    // stream format
    //------------------------------------------------------
    AudioStreamBasicDescription asbd;
    memset(&asbd, 0, sizeof(asbd));
    asbd.mFormatID         = kAudioFormatLinearPCM;
    asbd.mFormatFlags      = kAudioFormatFlagIsFloat;
    asbd.mChannelsPerFrame = 2;
    asbd.mBytesPerPacket   = sizeof(float);
    asbd.mFramesPerPacket  = 1;
    asbd.mBytesPerFrame    = sizeof(float);
    asbd.mBitsPerChannel   = 8 * sizeof(float);
    asbd.mSampleRate       = [[AVAudioSession sharedInstance] sampleRate];

    checkRes(AudioUnitSetProperty(auRemoteIO, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,  0, &asbd, sizeof(asbd)), "Error setting remoteIO in StreamFormat");
    checkRes(AudioUnitSetProperty(auMixer,    kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input,  0, &asbd, sizeof(asbd)), "Error setting mixer in StreamFormat");
    checkRes(AudioUnitSetProperty(auMixer,    kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &asbd, sizeof(asbd)), "Error setting mixer out StreamFormat");
    //----------------------------------------------
    // the number of inputs
    //----------------------------------------------
    int auMixerTracksMax = MAX_TRACKS + 1;
    checkRes(AudioUnitSetProperty(auMixer, kAEC, kASI, 0, &auMixerTracksMax, sizeof(auMixerTracksMax)), "element count");
    //----------------------------------------------
    // Sample Rate
    //----------------------------------------------
    Float64 sampleRate = [[AVAudioSession sharedInstance] sampleRate];
    checkRes(AudioUnitSetProperty(auMixer, kSR, kASO, 0, &sampleRate, sizeof(sampleRate)), "sample rate");
    //----------------------------------------------
    // AU Connections
    //----------------------------------------------
    AudioUnitConnection masterToRemote = {
        .sourceAudioUnit    = auMixer,
        .sourceOutputNumber = 0,
        .destInputNumber    = 0
    };
    checkRes(AudioUnitSetProperty(auRemoteIO, kAudioUnitProperty_MakeConnection, kASI, 0, &masterToRemote, sizeof(AudioUnitConnection)), "master->remote");
    //----------------------------------------------
    // CallBacks
    //----------------------------------------------
    AURenderCallbackStruct callbackMixer = { .inputProc = mixerCallback, .inputProcRefCon = (__bridge void *)(self) };
    for (UInt16 inBusNumber = 0; inBusNumber < ((MAX_TRACKS+1)*POLYPHONY); inBusNumber++) {
        checkRes(AudioUnitSetProperty(auMixer, kAudioUnitProperty_SetRenderCallback, kAG, inBusNumber, &callbackMixer, sizeof(callbackMixer)), "callbackMixer");
    }
    //------------------------------------------------------
    // Initialize the audio units
    //------------------------------------------------------
    checkRes(AudioUnitInitialize(auRemoteIO), "AudioUnitInitialize RemoteIO");
    checkRes(AudioUnitInitialize(auMixer), "AudioUnitInitialize Mixer");

    return true;
}
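
For reference, mixerCallback is not shown in the reduced code above. A render callback registered with kAudioUnitProperty_SetRenderCallback has the standard AURenderCallback signature; a minimal sketch (silence only, not the actual callback from my app) looks like this:

static OSStatus mixerCallback(void                       *inRefCon,
                              AudioUnitRenderActionFlags *ioActionFlags,
                              const AudioTimeStamp       *inTimeStamp,
                              UInt32                      inBusNumber,
                              UInt32                      inNumberFrames,
                              AudioBufferList            *ioData)
{
    // Fill ioData with inNumberFrames samples for this bus;
    // this sketch simply renders silence.
    for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }
    return noErr;
}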

I load sine waves into my audio data:

-(void) load:(NSURL*)fileNameURL{
    UInt64 totalFramesInFile = 4000;
    frameCount = totalFramesInFile;
    UInt32 channelCount = 1;
    if (leftChannel != NULL){
        free(leftChannel);
        leftChannel = 0;
    }
    leftChannel = (float *) calloc(totalFramesInFile, sizeof(float));
    isStereo = NO;
    AudioBufferList *bufferList;
    bufferList = (AudioBufferList *) malloc(sizeof(AudioBufferList));
    //----------------------------------------------
    // 7. initialize the mNumberBuffers member
    //----------------------------------------------
    bufferList->mNumberBuffers = channelCount;
    //----------------------------------------------
    // 8. initialize the mBuffers member to 0
    //----------------------------------------------
    AudioBuffer emptyBuffer = {0};
    size_t arrayIndex;
    for (arrayIndex = 0; arrayIndex < channelCount; arrayIndex++) {
        bufferList->mBuffers[arrayIndex] = emptyBuffer;
    }
    //----------------------------------------------
    // 9. set up the AudioBuffer structs in the buffer list
    //----------------------------------------------
    bufferList->mBuffers[0].mNumberChannels = 1;
    bufferList->mBuffers[0].mDataByteSize   = totalFramesInFile * sizeof(float);
    bufferList->mBuffers[0].mData           = leftChannel;
    if (channelCount == 2){
        bufferList->mBuffers[1].mNumberChannels = 1;
        bufferList->mBuffers[1].mDataByteSize   = totalFramesInFile * sizeof(float);
        bufferList->mBuffers[1].mData           = rightChannel;
    }

    float freqBase = 220;
    float freq = freqBase / 9 * tag + freqBase;
    long period_samples = 44100 / freq;
    long sample_num_ = 0;
    float factor = 1.0f / MAX_TRACKS * (MAX_TRACKS - tag);
    factor = factor * factor;
    for (int frameIndex = 0; frameIndex < totalFramesInFile; frameIndex++) {
        float value;
        if (period_samples == 0){
            value = 0;
        }
        else {
            float x = (sample_num_ / (float)period_samples);
            value = sinf(2.0f * M_PI * x);
            sample_num_ = (sample_num_ + 1) % (long)period_samples;
        }
        value = value * factor;
        leftChannel[frameIndex] = value;
        if (channelCount == 2) rightChannel[frameIndex] = value;
    }

    free(bufferList);

    return;
}
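
One detail to note: the tone period here is derived from a hard-coded 44100, while the stream format in setup uses the AVAudioSession sample rate. A sketch that keeps the two consistent (assuming the session rate is the one that should drive the test tone) would be:

// Sketch: derive the tone period from the same sample rate used for the ASBD,
// instead of the hard-coded 44100.
Float64 sessionRate = [[AVAudioSession sharedInstance] sampleRate];
long period_samples = (long)(sessionRate / freq);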

Cheers,

Pascal

On 31 August 2014, at 22:26, Dave O'Neill <email@hidden> wrote:

Pascal,

That looks like a good ASBD for your audio units. One issue I've run into is setting the client format on an ExtAudioFileRef: it needs the kAudioFormatFlagsNativeEndian flag as well for mono files.
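
For illustration, setting the client format on an ExtAudioFileRef with that flag included could look like the sketch below (extFile, the sample rate, and the non-interleaved mono layout are placeholders, not code from this thread):

// Sketch: client data format for an ExtAudioFileRef, including the native-endian flag.
// Error handling omitted; extFile is an already-opened ExtAudioFileRef.
AudioStreamBasicDescription clientFormat = {0};
clientFormat.mSampleRate       = 44100.0;
clientFormat.mFormatID         = kAudioFormatLinearPCM;
clientFormat.mFormatFlags      = kAudioFormatFlagIsFloat | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsNonInterleaved;
clientFormat.mChannelsPerFrame = 1;
clientFormat.mBytesPerPacket   = sizeof(float);
clientFormat.mFramesPerPacket  = 1;
clientFormat.mBytesPerFrame    = sizeof(float);
clientFormat.mBitsPerChannel   = 8 * sizeof(float);
ExtAudioFileSetProperty(extFile,
                        kExtAudioFileProperty_ClientDataFormat,
                        sizeof(clientFormat),
                        &clientFormat);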

Dave

On Sunday, August 31, 2014, Pascal <email@hidden> wrote: