Hello — following your earlier advice, I have successfully used AudioUnit to play PCM data, and the CPU usage is very low. However, I found that memory usage grows significantly over time. According to my understanding of a ring buffer, it should keep reusing the same memory, so my memory usage should fluctuate within a bounded range; instead, it grows continuously. My current approach is to allocate an AudioBufferList and then fill AudioBufferList->mBuffers[0].mData from the render callback function. My code is below. Is my understanding of ring buffers wrong, or is there somewhere I need to release memory manually? One relevant detail: the network delivers data much faster than I play it back. When I play the same stream with Android's Oboe, there is no continuous memory growth.
// Configures the shared audio session for playback, creates a RemoteIO
// audio unit for 16-bit signed-integer mono PCM at 44.1 kHz, and installs
// PlayCallback as the render callback on OUTPUT_BUS.
//
// NOTE(review): buffList is malloc'd here and (from what is visible) never
// freed. It is a one-time fixed-size allocation, so it is not the source of
// *continuous* memory growth, but it should be released in dealloc.
- (void)initPlayer {
    NSError *error = nil;
    OSStatus status = noErr;

    // Set the audio session category. Check the BOOL return value, not the
    // error pointer (the error is only meaningful on failure).
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    if (![audioSession setCategory:AVAudioSessionCategoryPlayback error:&error]) {
        NSLog(@"setCategory error: %@", error);
    }

    // Describe and instantiate the RemoteIO output unit.
    AudioComponentDescription audioDesc;
    audioDesc.componentType = kAudioUnitType_Output;
    audioDesc.componentSubType = kAudioUnitSubType_RemoteIO;
    audioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
    audioDesc.componentFlags = 0;
    audioDesc.componentFlagsMask = 0;

    AudioComponent inputComponent = AudioComponentFindNext(NULL, &audioDesc);
    status = AudioComponentInstanceNew(inputComponent, &audioUnit);
    if (status != noErr) {
        // Without a valid unit the remaining setup cannot succeed.
        NSLog(@"AudioComponentInstanceNew error with status:%d", (int)status);
        return;
    }

    // Scratch buffer for PCM data (CONST_BUFFER_SIZE bytes, mono).
    buffList = (AudioBufferList *)malloc(sizeof(AudioBufferList));
    buffList->mNumberBuffers = 1;
    buffList->mBuffers[0].mNumberChannels = 1;
    buffList->mBuffers[0].mDataByteSize = CONST_BUFFER_SIZE;
    buffList->mBuffers[0].mData = malloc(CONST_BUFFER_SIZE);

    // Enable output I/O on the RemoteIO unit. (The original wrapped this in
    // `if (flag)` with flag always 1 — a dead conditional, removed.)
    UInt32 flag = 1;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Output,
                                  OUTPUT_BUS,
                                  &flag,
                                  sizeof(flag));
    if (status != noErr) {
        NSLog(@"AudioUnitSetProperty error with status:%d", (int)status);
    }

    // Stream format: 44.1 kHz, 16-bit signed integer, mono, interleaved.
    // Applied to the INPUT scope of the output bus (the format we feed in).
    AudioStreamBasicDescription outputFormat;
    memset(&outputFormat, 0, sizeof(outputFormat));
    outputFormat.mSampleRate = 44100;
    outputFormat.mFormatID = kAudioFormatLinearPCM;
    outputFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
    outputFormat.mFramesPerPacket = 1;
    outputFormat.mChannelsPerFrame = 1;
    outputFormat.mBytesPerFrame = 2;
    outputFormat.mBytesPerPacket = 2;
    outputFormat.mBitsPerChannel = 16;
    [self printAudioStreamBasicDescription:outputFormat];
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  OUTPUT_BUS,
                                  &outputFormat,
                                  sizeof(outputFormat));
    if (status != noErr) {
        NSLog(@"AudioUnitSetProperty error with status:%d", (int)status);
    }

    // Install the render callback that pulls PCM out of the queue.
    // self is passed un-retained (__bridge); the audio unit must not outlive
    // this object, or the callback must be removed first.
    AURenderCallbackStruct playCallback;
    playCallback.inputProc = PlayCallback;
    playCallback.inputProcRefCon = (__bridge void *)self;
    status = AudioUnitSetProperty(audioUnit,
                                  kAudioUnitProperty_SetRenderCallback,
                                  kAudioUnitScope_Input,
                                  OUTPUT_BUS,
                                  &playCallback,
                                  sizeof(playCallback));
    if (status != noErr) {
        NSLog(@"AudioUnitSetProperty error with status:%d", (int)status);
    }

    OSStatus result = AudioUnitInitialize(audioUnit);
    NSLog(@"result %d", (int)result);
}
// Render callback: fills ioData->mBuffers[0] with up to inNumberFrames
// frames of 16-bit mono PCM pulled from player->webpp.m_list, zero-padding
// any shortfall with silence.
//
// BUG FIXES vs. the original:
//  1. The empty() check ran BEFORE taking g_lock, and then unlock() was
//     called on a mutex that was never locked — undefined behavior.
//  2. memcpy always wrote to offset 0 of mData, so each loop iteration
//     overwrote the previous chunk instead of appending after it.
//  3. The silence memset also started at offset 0, clobbering samples
//     already copied; it must zero only the remaining tail.
//
// NOTE(review): the continuous memory growth is most likely m_list itself —
// the network producer pushes chunks faster than this callback consumes
// them, so the list grows without bound. Cap the queue (drop or block the
// producer) to get the bounded, ring-buffer-like footprint you expect.
// Also note that locking a mutex inside a real-time render callback risks
// priority inversion; a true lock-free ring buffer is the usual fix.
static OSStatus PlayCallback(void *inRefCon,
                             AudioUnitRenderActionFlags *ioActionFlags,
                             const AudioTimeStamp *inTimeStamp,
                             UInt32 inBusNumber,
                             UInt32 inNumberFrames,
                             AudioBufferList *ioData)
{
    LYPlayer *player = (__bridge LYPlayer *)inRefCon;
    // 16-bit mono => 2 bytes per frame.
    const int totalBytes = 2 * (int)inNumberFrames;
    int bytesWritten = 0;
    uint8_t *out = (uint8_t *)ioData->mBuffers[0].mData;

    while (bytesWritten < totalBytes) {
        player->webpp.g_lock.lock();
        if (player->webpp.m_list.empty()) {
            player->webpp.g_lock.unlock();
            // Underrun: pad only the unwritten tail with silence.
            memset(out + bytesWritten, 0, totalBytes - bytesWritten);
            return noErr;
        }
        std::string chunk = player->webpp.m_list.front();
        player->webpp.m_list.pop_front();
        int copySize = std::min(totalBytes - bytesWritten, (int)chunk.size());
        // Append after whatever was copied in earlier iterations.
        memcpy(out + bytesWritten, chunk.data(), copySize);
        if ((int)chunk.size() > copySize) {
            // Re-queue the unconsumed remainder for the next render cycle.
            player->webpp.m_list.emplace_front(chunk.substr(copySize));
        }
        player->webpp.g_lock.unlock();
        bytesWritten += copySize;
    }
    return noErr;
}