RemoteIO Audio File Playback Issues


  • Subject: RemoteIO Audio File Playback Issues
  • From: Carter Allen <email@hidden>
  • Date: Tue, 15 Jun 2010 12:48:06 -0600

Hello again!

I am trying to play back a CAF audio file using the RemoteIO AudioUnit on the iPad. I am using a test file that contains a single tone and is 30 seconds long. I tried to write code that reads all of this data into a buffer and then plays back the full 30 seconds, but it doesn't work: currently it makes a popping noise and then stops. I'm wondering if someone can see anything obviously wrong with my implementation. Here is the relevant code:

OSStatus audioCallback(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
	AudioUnit_DemoViewController *this = (AudioUnit_DemoViewController *)inRefCon;
	UInt32 frameByteSize = inNumberFrames * sizeof(AudioUnitSampleType);
	OSStatus err = noErr;
	if (this.playing) {
		if (this.mMaxFrame > 0) {
			NSLog(@"frameByteSize = %d, inNumberFrames = %d, sizeof(SInt16) = %d", frameByteSize, inNumberFrames, sizeof(SInt16));
			UInt32 nextFrame = this.mMaxFrame - this.mReadFrame;
			NSLog(@"nextFrame = %d", nextFrame);
			if (nextFrame > inNumberFrames) nextFrame = inNumberFrames;
			for (NSInteger i = 0; i < ioData->mNumberBuffers; i++) {
				SInt16 *p = (SInt16 *)ioData->mBuffers[i].mData;
				if (nextFrame < inNumberFrames) memset(p, 0, frameByteSize);
				memcpy(p, this.mFrameData + this.mReadFrame, nextFrame * sizeof(SInt16));
			}
			this.mReadFrame += nextFrame;
			if (this.mReadFrame == this.mMaxFrame) {
				NSLog (@ "inNumberFrames =% d", inNumberFrames);
				this.playing = NO;
			}
		}
	}
	else {
		for (NSInteger i = 0; i < ioData->mNumberBuffers; i++) {
			SInt32 *p = (SInt32 *)ioData->mBuffers[i].mData;
			memset(p, 0, frameByteSize);
		}
	}
	return err;
}
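
For comparison, here is a stripped-down sketch of what I think the per-render copy ought to look like, assuming the data really is interleaved 16-bit stereo LPCM (4 bytes per frame, as the file format logged below suggests). The gFrameData/gReadFrame/gMaxFrame names are placeholders for this sketch only, not variables from my project:

#include <AudioToolbox/AudioToolbox.h>
#include <string.h>

static SInt16 *gFrameData = NULL;       // interleaved L/R samples read from the file
static UInt32  gReadFrame = 0;          // next frame to hand to the output unit
static UInt32  gMaxFrame  = 0;          // total frames available in gFrameData
static const UInt32 kBytesPerFrame = 4; // 2 channels * 2 bytes, per the file's ASBD

static OSStatus sketchCallback(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
	// With an interleaved stream format there is a single buffer carrying both channels.
	SInt16 *out = (SInt16 *)ioData->mBuffers[0].mData;
	UInt32 framesLeft = (gMaxFrame > gReadFrame) ? gMaxFrame - gReadFrame : 0;
	UInt32 framesToCopy = (framesLeft < inNumberFrames) ? framesLeft : inNumberFrames;

	// Copy whole frames (bytes per frame, not bytes per sample) and zero any remaining tail.
	memcpy(out, (UInt8 *)gFrameData + gReadFrame * kBytesPerFrame, framesToCopy * kBytesPerFrame);
	if (framesToCopy < inNumberFrames) {
		memset((UInt8 *)out + framesToCopy * kBytesPerFrame, 0, (inNumberFrames - framesToCopy) * kBytesPerFrame);
	}
	gReadFrame += framesToCopy;
	return noErr;
}

Back to my actual code:
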
- (id)init {
	if ((self = [super init])) {
		mFrameData = NULL;
		mReadFrame = 0;
		playing = NO;
	}
	return self;
}
- (IBAction)play {
	mReadFrame = 0;
	if (!playing) playing = YES;
}
- (IBAction)stop { mReadFrame = 0; }
- (void)viewDidLoad {
    [super viewDidLoad];

	// Start an audio session
	AudioSessionSetActive(true);

	// Create an OSStatus
	OSStatus status;

	// Describe the audio unit
	AudioComponentDescription description;
	memset(&description, 0, sizeof(description));
	description.componentType = kAudioUnitType_Output;
	description.componentSubType = kAudioUnitSubType_RemoteIO;
	description.componentFlags = 0;
	description.componentFlagsMask = 0;
	description.componentManufacturer = kAudioUnitManufacturer_Apple;

	// Get the audio unit
	AudioComponent audioUnit = AudioComponentFindNext(NULL, &description);

	// Create an instance of the audio unit
	AudioComponentInstance audioUnitInstance;
	status = AudioComponentInstanceNew(audioUnit, &audioUnitInstance);

	checkStatus(status);

	// Enable the speaker
	UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnitInstance, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, sizeof(flag));

	checkStatus(status);

	// Find the test tone file
	CFBundleRef mainBundle = CFBundleGetMainBundle();
	CFURLRef testToneURL = CFBundleCopyResourceURL(mainBundle, CFSTR("Test Tone"), CFSTR("caf"), NULL);
	ExtAudioFileRef toneFile;
	status = ExtAudioFileOpenURL(testToneURL, &toneFile);

	checkStatus(status);

	// Describe the test tone file
	AudioStreamBasicDescription dataFormat;
	UInt32 size = sizeof (AudioStreamBasicDescription);
	ExtAudioFileGetProperty(toneFile, kExtAudioFileProperty_FileDataFormat, &size, &dataFormat);
	NSLog(@"fileFormat.mSampleRate = %f", dataFormat.mSampleRate);
	NSLog(@"fileFormat.mChannelsPerFrame = %d", dataFormat.mChannelsPerFrame);
	NSLog(@"fileFormat.mBitsPerChannel = %d", dataFormat.mBitsPerChannel);
	NSLog(@"fileFormat.mBytesPerFrame = %d", dataFormat.mBytesPerFrame);
	size = sizeof(SInt64);
	SInt64 fileLength;
	ExtAudioFileGetProperty(toneFile, kExtAudioFileProperty_FileLengthFrames, &size, &fileLength);

	// Set the input format for the audio unit to the test tone's data format
	status = AudioUnitSetProperty(audioUnitInstance, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, kOutputBus, &dataFormat, sizeof(dataFormat));

	checkStatus(status);

	// Read the tone file into a buffer
	AudioBufferList toneBuffer;
	toneBuffer.mNumberBuffers = 1;
	toneBuffer.mBuffers[0].mNumberChannels = 1;
	toneBuffer.mBuffers[0].mDataByteSize = ((1024 * 2) * dataFormat.mChannelsPerFrame);
	toneBuffer.mBuffers[0].mData = malloc(toneBuffer.mBuffers[0].mDataByteSize * fileLength);
	NSLog(@"fileLength = %d	mDataByteSize = %u sizeof(int) = %d (fileLength * sizeof(int)) = %d", fileLength, toneBuffer.mBuffers[0].mDataByteSize, sizeof(int), (fileLength * sizeof(int)));
	mMaxFrame = fileLength;
	ExtAudioFileRead(toneFile, &mMaxFrame, &toneBuffer);
	NSLog(@"fileLength = %d", fileLength);
	NSLog(@"readFrames = %d", mMaxFrame);
	ExtAudioFileDispose(toneFile);
	mReadFrame = 0;
	mFrameData = toneBuffer.mBuffers[0].mData;

	// Set up the render callback for the audio unit
	AURenderCallbackStruct callback;
    callback.inputProc = audioCallback;
    callback.inputProcRefCon = self;
    status = AudioUnitSetProperty(audioUnitInstance, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, kOutputBus, &callback, sizeof(callback));

	checkStatus(status);

	// Initialize the audio unit
    status = AudioUnitInitialize(audioUnitInstance);

	checkStatus(status);

	// Start the audio unit
	AudioOutputUnitStart(audioUnitInstance);
}
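
And in case it matters, here is a sketch of how I imagine the file read could be looped so that the whole file ends up in memory, since a single ExtAudioFileRead call only seems to hand back 1024 frames (see the log below). Again, the function and variable names here are placeholders and this assumes an interleaved 16-bit stereo LPCM file, not code from my project:

#include <AudioToolbox/AudioToolbox.h>
#include <stdlib.h>

// Reads every frame of an interleaved 16-bit stereo LPCM file into one malloc'd buffer.
static SInt16 *ReadWholeFile(ExtAudioFileRef file, SInt64 totalFrames, UInt32 bytesPerFrame) {
	UInt8 *data = malloc((size_t)(totalFrames * bytesPerFrame));
	if (data == NULL) return NULL;

	SInt64 framesRead = 0;
	while (framesRead < totalFrames) {
		AudioBufferList bufferList;
		bufferList.mNumberBuffers = 1;
		bufferList.mBuffers[0].mNumberChannels = 2;   // interleaved stereo
		bufferList.mBuffers[0].mData = data + framesRead * bytesPerFrame;
		bufferList.mBuffers[0].mDataByteSize = (UInt32)((totalFrames - framesRead) * bytesPerFrame);

		UInt32 ioFrames = (UInt32)(totalFrames - framesRead);
		OSStatus err = ExtAudioFileRead(file, &ioFrames, &bufferList);
		if (err != noErr || ioFrames == 0) break;     // stop on error or end of file
		framesRead += ioFrames;
	}
	return (SInt16 *)data;
}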

When I first load the view, the following is printed to the console:

2010-06-15 12:41:51.956 AudioUnit Demo[24714:40b] fileFormat.mSampleRate = 44100.000000
2010-06-15 12:41:51.958 AudioUnit Demo[24714:40b] fileFormat.mChannelsPerFrame = 2
2010-06-15 12:41:51.958 AudioUnit Demo[24714:40b] fileFormat.mBitsPerChannel = 16
2010-06-15 12:41:51.959 AudioUnit Demo[24714:40b] fileFormat.mBytesPerFrame = 4
2010-06-15 12:41:51.959 AudioUnit Demo[24714:40b] fileLength = 1351680	mDataByteSize = 0 sizeof(int) = 4096 (fileLength * sizeof(int)) = 4
2010-06-15 12:41:51.960 AudioUnit Demo[24714:40b] fileLength = 1351680
2010-06-15 12:41:51.960 AudioUnit Demo[24714:40b] readFrames = 1024

After running the play method, this is printed to the console:

2010-06-15 12:41:55.155 AudioUnit Demo[24714:b803] frameByteSize = 2048, inNumberFrames = 512, sizeof(SInt16) = 2
2010-06-15 12:41:55.156 AudioUnit Demo[24714:b803] nextFrame = 1024

Any insight is appreciated!

Sincerely,
Carter Allen
