Core Audio app with mic input that used to work but now doesn't

macOS 10.15.4 and Xcode 10.1

I'd like someone to look at my code and help me understand why the app is no longer working. It has to do with microphone input: I'm getting callbacks with buffer pointers, but the data is all 0.0. This is my initialization code.


// Per-recording state, handed to the AudioQueue input callback through its
// user-data pointer.
typedef struct MyRecorder

{

AudioFileID recordFile;      // destination audio file (not used in the code shown here)

SInt64 recordPacket;         // running packet index into recordFile

Float32 *pSampledData;       // scratch sample pointer — appears unused; TODO confirm

MorseDecode *pMorseDecoder;  // decoder that consumes the captured samples

} MyRecorder;


#pragma mark utility functions

// Print a readable diagnostic for a failed Core Audio call.
// If the OSStatus looks like a printable four-character code it is shown as
// 'abcd'; otherwise it is printed as a decimal integer.
// Note: unlike the "Learning Core Audio" original, this version does NOT
// exit(), so callers keep running after a failure.
void CheckError(OSStatus error, const char *operation)
{
    if (error == noErr) return;

    char errorString[20];
    // See if it appears to be a 4-char code. Copy the big-endian bytes with
    // memcpy: the original `*(UInt32*)(errorString + 1)` store was misaligned
    // and violated strict aliasing (undefined behavior).
    UInt32 bigEndian = CFSwapInt32HostToBig((UInt32)error);
    memcpy(errorString + 1, &bigEndian, sizeof(bigEndian));
    // Cast to unsigned char before isprint(): passing a negative char is UB.
    if (isprint((unsigned char)errorString[1]) && isprint((unsigned char)errorString[2]) &&
        isprint((unsigned char)errorString[3]) && isprint((unsigned char)errorString[4]))
    {
        errorString[0] = errorString[5] = '\'';
        errorString[6] = '\0';
    }
    else
    {
        // Not printable: fall back to the numeric value (bounded write).
        snprintf(errorString, sizeof(errorString), "%d", (int)error);
    }
    fprintf(stderr, "Error: %s (%s)\n", operation, errorString);
}


// Query the nominal sample rate of the system's default input device.
// Writes the rate through outSampleRate and returns noErr on success,
// or the first failing AudioObjectGetPropertyData status otherwise.
OSStatus MyGetDefaultInputDeviceSampleRate(Float64 *outSampleRate)
{
    // Step 1: resolve the default input device's ID from the system object.
    AudioObjectPropertyAddress address = {
        kAudioHardwarePropertyDefaultInputDevice,
        kAudioObjectPropertyScopeGlobal,
        0
    };
    AudioDeviceID deviceID = 0;
    UInt32 size = sizeof(deviceID);
    OSStatus status = AudioObjectGetPropertyData(kAudioObjectSystemObject,
                                                 &address,
                                                 0,
                                                 NULL,
                                                 &size,
                                                 &deviceID);
    if (status != noErr)
        return status;

    // Step 2: ask that device for its nominal sample rate.
    address.mSelector = kAudioDevicePropertyNominalSampleRate;
    size = sizeof(Float64);
    status = AudioObjectGetPropertyData(deviceID,
                                        &address,
                                        0,
                                        NULL,
                                        &size,
                                        outSampleRate);
    return status;
}


// Compute a capture-buffer size (in bytes) large enough to hold `seconds`
// of audio in the given format. For constant-bit-rate PCM this is simply
// frames * bytes-per-frame; otherwise it falls back to packet-based math
// using the queue's maximum packet size.
static int MyComputeRecordBufferSize(const AudioStreamBasicDescription *format,
                                     AudioQueueRef queue,
                                     float seconds)
{
    int frames = (int)ceil(seconds * format->mSampleRate);

    if (format->mBytesPerFrame > 0)
    {
        // Constant bytes per frame (PCM): the easy case.
        return frames * format->mBytesPerFrame;
    }

    // BUG FIX: initialize maxPacketSize. CheckError() only prints and
    // returns on failure, so the original could read this uninitialized
    // if AudioQueueGetProperty failed (undefined behavior).
    UInt32 maxPacketSize = 0;
    if (format->mBytesPerPacket > 0)
    {
        // Constant packet size.
        maxPacketSize = format->mBytesPerPacket;
    }
    else
    {
        // Variable packet size: ask the queue for the largest possible packet.
        UInt32 propertySize = sizeof(maxPacketSize);
        CheckError(AudioQueueGetProperty(queue,
                                         kAudioConverterPropertyMaximumOutputPacketSize,
                                         &maxPacketSize,
                                         &propertySize),
                   "Couldn't get queues max output packet size");
    }

    int packets;
    if (format->mFramesPerPacket > 0)
        packets = frames / format->mFramesPerPacket;
    else
        packets = frames;   // worst case scenario: 1 frame in a packet

    if (packets == 0)       // sanity check
        packets = 1;

    return packets * maxPacketSize;
}



// Implemented elsewhere (presumably the UI/Swift side): forwards the decoder
// state to the main thread for processing — TODO confirm whether it copies
// the sample data or just the pointer.
extern void bridgeToMainThread(MorseDecode *pDecode);


// Diagnostic counter of how many input callbacks have fired.
static int callBacks = 0;

// ---------------------------------------------

// AudioQueue input callback — runs on the queue's internal thread each time a
// capture buffer has been filled. Points the Morse decoder at the raw Float32
// samples, notifies the main thread, then recycles the buffer onto the queue.
//
// NOTE(review): pBuffer is aimed straight at the queue-owned buffer, which is
// re-enqueued immediately below. If bridgeToMainThread() is asynchronous, the
// queue can overwrite these samples before the main thread reads them —
// consider copying the data out first. TODO confirm bridge semantics.
//
// NOTE(review): callbacks arriving with every sample == 0.0 on macOS 10.14+
// is the classic symptom of missing microphone privacy permission
// (NSMicrophoneUsageDescription / TCC consent) — worth verifying before
// debugging this code.
static void MyAQInputCallback(void *inUserData,

AudioQueueRef inQueue,

AudioQueueBufferRef inBuffer,

const AudioTimeStamp *inStartTime,

UInt32 inNumPackets,

const AudioStreamPacketDescription *inPacketDesc)

{

MyRecorder *recorder = (MyRecorder*)inUserData;

// For interleaved float PCM (as configured in audioInit) the buffer is a
// flat array of Float32 samples.
Float32 *pAudioData = (Float32*)(inBuffer->mAudioData);

// Hand the decoder a view into the queue's buffer (not a copy — see NOTE).
recorder->pMorseDecoder->pBuffer = pAudioData;

// bufferSize is in packets (== frames here, since mFramesPerPacket is 1).
recorder->pMorseDecoder->bufferSize = inNumPackets;

bridgeToMainThread(recorder->pMorseDecoder);

// Return the buffer to the queue so capture can continue.
CheckError(AudioQueueEnqueueBuffer(inQueue,

inBuffer,

0,

NULL),

"AudioQueueEnqueueBuffer failed");

printf("packets = %ld, bytes = %ld\n",(long)inNumPackets,(long)inBuffer->mAudioDataByteSize);

callBacks++;

//printf("\ncallBacks = %d\n",callBacks);

//if(callBacks == 0)

//audioStop();

}



// Module-wide state: the single input queue, the recorder context passed to
// the callback, and the stream format shared between setup and capture.
static AudioQueueRef queue = {0};

static MyRecorder recorder = {0};

static AudioStreamBasicDescription recordFormat;



void audioInit()

{

// set up format

memset(&recordFormat,0,sizeof(recordFormat));

recordFormat.mFormatID = kAudioFormatLinearPCM;

recordFormat.mChannelsPerFrame = 2;

recordFormat.mBitsPerChannel = 32;

recordFormat.mBytesPerPacket = recordFormat.mBytesPerFrame = recordFormat.mChannelsPerFrame * sizeof(Float32);

recordFormat.mFramesPerPacket = 1;

//recordFormat.mFormatFlags = kAudioFormatFlagsCanonical;

recordFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked;

MyGetDefaultInputDeviceSampleRate(&recordFormat.mSampleRate);

UInt32 propSize = sizeof(recordFormat);

CheckError(AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,

0,

NULL,

&propSize,

&recordFormat),

"AudioFormatProperty failed");

recorder.pMorseDecoder = MorseDecode::pInstance();

recorder.pMorseDecoder->m_sampleRate = recordFormat.mSampleRate;

// recorder.pMorseDecoder->setCircularBuffer();

//set up queue

CheckError(AudioQueueNewInput(&recordFormat,

MyAQInputCallback,

&recorder,

NULL,

kCFRunLoopCommonModes,

0,

&queue),

"AudioQueueNewInput failed");

UInt32 size = sizeof(recordFormat);

CheckError(AudioQueueGetProperty(queue,

kAudioConverterCurrentOutputStreamDescription,

&recordFormat,

&size), "Couldn't get queue's format");

// set up buffers and enqueue

const int kNumberRecordBuffers = 3;

int bufferByteSize = MyComputeRecordBufferSize(&recordFormat, queue, AUDIO_BUFFER_DURATION);

for(int bufferIndex = 0; bufferIndex < kNumberRecordBuffers; bufferIndex++)

{

AudioQueueBufferRef buffer;

CheckError(AudioQueueAllocateBuffer(queue,

bufferByteSize,

&buffer),

"AudioQueueAllocateBuffer failed");

CheckError(AudioQueueEnqueueBuffer(queue,

buffer,

0,

NULL),

"AudioQueueEnqueueBuffer failed");

}

}


void audioRun()

{

CheckError(AudioQueueStart(queue, NULL), "AudioQueueStart failed");

}


void audioStop()

{

CheckError(AudioQueuePause(queue), "AudioQueuePause failed");

}