When using the unit api, I got the channel copy data in callback.

Hi all:


I'm working on audio unit record with remote i/o unit.

When I use "Voice Memos" on iPad Pro with stereo mic, I can get L/R channel seperate stereo voice data,

But when I try to use Audio Unit Remote I/O api to record , I got L/R mixed channel copy data. What am I missing?

My Test code as follows:


#define OutputElement 0
#define InputElement 1

AudioBufferList *buffList = nullptr;
AudioComponentInstance audioUnit;
ExtAudioFileRef mOutputAudioFile;
ExtAudioFileRef mInputAudioFile;
NSMutableData* outputData;
NSMutableData* inputData;


NSString* fourCharNSStringForFourCharCode(OSStatus aCode){
  
  char fourChar[5] = {static_cast<char>((aCode >> 24) & 0xFF), static_cast<char>((aCode >> 16) & 0xFF), static_cast<char>((aCode >> 8) & 0xFF), static_cast<char>(aCode & 0xFF), 0};
  
  NSString *fourCharString = [NSString stringWithCString:fourChar encoding:NSUTF8StringEncoding];
  
  return fourCharString;
  
}

void checkStatus(OSStatus status){
  NSLog(@"%@",fourCharNSStringForFourCharCode(status));
}

static OSStatus RecordCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData);

static OSStatus PlayCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData);


@implementation SimpleAUImplement

- (void)initAudioSession{
  NSError *audioSessionError = nil;
  
  AVAudioSession *mysession = [AVAudioSession sharedInstance];//1
  [mysession setActive:YES error:&audioSessionError];//4
  
  [mysession setCategory:AVAudioSessionCategoryPlayAndRecord
             withOptions: AVAudioSessionCategoryOptionDefaultToSpeaker |
   AVAudioSessionCategoryOptionMixWithOthers |
   AVAudioSessionCategoryOptionAllowBluetooth |
   AVAudioSessionCategoryOptionAllowBluetoothA2DP
                   error:&audioSessionError];
  // Do any additional setup after loading the view.
  
  NSArray* avaribleInput = mysession.availableInputs;
  for (AVAudioSessionPortDescription *input in avaribleInput) {
    NSLog(@"input name %@, TypeName:%@",input.portName, input.portType);
    NSString *inputString = input.portType;
    /* input port types */
    if ([inputString hasPrefix:AVAudioSessionPortLineIn]) {
      NSLog(@"AVAudioSessionPortLineIn");
    }
    if ([inputString hasPrefix:AVAudioSessionPortBuiltInMic]) {
      NSLog(@"AVAudioSessionPortBuiltInMic");
    }
    if ([inputString hasPrefix:AVAudioSessionPortHeadsetMic]) {
      NSLog(@"AVAudioSessionPortHeadsetMic");
      [mysession setPreferredInput:input error:&audioSessionError];
      NSLog(@"setPreferredInput to AVAudioSessionPortHeadsetMic");
    }
    /* port types that refer to either input or output */
    if ([inputString hasPrefix:AVAudioSessionPortBluetoothHFP]) {
      NSLog(@"AVAudioSessionPortBluetoothHFP");
    }
    if ([inputString hasPrefix:AVAudioSessionPortUSBAudio]) {
      NSLog(@"AVAudioSessionPortUSBAudio");
    }
    if ([inputString hasPrefix:AVAudioSessionPortCarAudio]) {
      NSLog(@"AVAudioSessionPortCarAudio");
    }
  }
  
  NSArray* avaribleOutput = mysession.currentRoute.outputs;
  if(avaribleOutput.count <= 0){
    NSLog(@"No Output source.");
  }
  
  for (AVAudioSessionPortDescription *output in avaribleOutput) {
    NSLog(@"output name %@, TypeName:%@",output.portName, output.portType);
    NSString *outputString = output.portType;
    /* output port types */
    if ([outputString hasPrefix:AVAudioSessionPortLineOut]) {
      NSLog(@"AVAudioSessionPortLineOut");
    }
    if ([outputString hasPrefix:AVAudioSessionPortHeadphones]) {
      NSLog(@"AVAudioSessionPortHeadphones");
    }
    if ([outputString hasPrefix:AVAudioSessionPortBluetoothA2DP]) {
      NSLog(@"AVAudioSessionPortBluetoothA2DP");
    }
    if ([outputString hasPrefix:AVAudioSessionPortBuiltInReceiver]) {
      NSLog(@"AVAudioSessionPortBuiltInReceiver");
    }
    if ([outputString hasPrefix:AVAudioSessionPortBuiltInSpeaker]) {
      NSLog(@"AVAudioSessionPortBuiltInSpeaker");
    }
    if ([outputString hasPrefix:AVAudioSessionPortHDMI]) {
      NSLog(@"AVAudioSessionPortHDMI");
    }
    if ([outputString hasPrefix:AVAudioSessionPortAirPlay]) {
      NSLog(@"AVAudioSessionPortAirPlay");
    }
    if ([outputString hasPrefix:AVAudioSessionPortBluetoothLE]) {
      NSLog(@"AVAudioSessionPortBluetoothLE");
    }
    /* port types that refer to either input or output */
    if ([outputString hasPrefix:AVAudioSessionPortBluetoothHFP]) {
      NSLog(@"AVAudioSessionPortBluetoothHFP");
    }
    if ([outputString hasPrefix:AVAudioSessionPortUSBAudio]) {
      NSLog(@"AVAudioSessionPortUSBAudio");
    }
    if ([outputString hasPrefix:AVAudioSessionPortCarAudio]) {
      NSLog(@"AVAudioSessionPortCarAudio");
    }
  }
}

- (void)initBuffer {
  UInt32 flag = 0;
  AudioUnitSetProperty(audioUnit,
                       kAudioUnitProperty_ShouldAllocateBuffer,
                       kAudioUnitScope_Output,
                       InputElement,
                       &flag,
                       sizeof(flag));
  
  buffList = (AudioBufferList*)malloc(sizeof(AudioBufferList));
  buffList->mNumberBuffers = 1;
  buffList->mBuffers[0].mNumberChannels = 1;
  buffList->mBuffers[0].mDataByteSize = 2048 * sizeof(short);
  buffList->mBuffers[0].mData = (short *)malloc(sizeof(short) * 2048);
  
  inputData = [[NSMutableData alloc] init];
  outputData = [[NSMutableData alloc] init];
}

- (void)writeDataToFile{
  // Generate the file path
  NSString *documentsDirectory = [[SimpleAudioFileHelper sharedInstance] GetDefaultFilePath];
  
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
   
    NSString *dataPath = [documentsDirectory stringByAppendingPathComponent:@"input.caf"];
    
    // Save it into file system
    [inputData writeToFile:dataPath atomically:YES];
  });
  dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
    NSString *dataPath = [documentsDirectory stringByAppendingPathComponent:@"output.caf"];
    // Save it into file system
    [outputData writeToFile:dataPath atomically:YES];
  });
}

- (void)initAudioComponent {
  AudioComponentDescription audioDesc;
  audioDesc.componentType = kAudioUnitType_Output;
  audioDesc.componentSubType = kAudioUnitSubType_RemoteIO;
  audioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
  audioDesc.componentFlags = 0;
  audioDesc.componentFlagsMask = 0;
  
  AudioComponent inputComponent = AudioComponentFindNext(NULL, &audioDesc);
  AudioComponentInstanceNew(inputComponent, &audioUnit);
}

- (void)printASBD:(AudioStreamBasicDescription)asbd {
  
  char formatIDString[5];
  UInt32 formatID = CFSwapInt32HostToBig (asbd.mFormatID);
  bcopy (&formatID, formatIDString, 4);
  formatIDString[4] = '\0';
  
  NSLog (@"  Sample Rate:         %10.0f",  asbd.mSampleRate);
  NSLog (@"  Format ID:           %10s",    formatIDString);
  NSLog (@"  Format Flags:        %10X",    (unsigned int)asbd.mFormatFlags);
  NSLog (@"  Bytes per Packet:    %10d",    (unsigned int)asbd.mBytesPerPacket);
  NSLog (@"  Frames per Packet:   %10d",    (unsigned int)asbd.mFramesPerPacket);
  NSLog (@"  Bytes per Frame:     %10d",    (unsigned int)asbd.mBytesPerFrame);
  NSLog (@"  Channels per Frame:  %10d",    (unsigned int)asbd.mChannelsPerFrame);
  NSLog (@"  Bits per Channel:    %10d",    (unsigned int)asbd.mBitsPerChannel);
}

- (void)initFormat{
  AudioStreamBasicDescription audioFormat;
  audioFormat.mSampleRate = 44100;
  audioFormat.mFormatID = kAudioFormatLinearPCM;
  audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
  audioFormat.mFramesPerPacket = 1;
  audioFormat.mChannelsPerFrame = 2;
  audioFormat.mBitsPerChannel = 16;
  audioFormat.mBytesPerPacket = 4;
  audioFormat.mBytesPerFrame = 4;
  
  AudioUnitSetProperty(audioUnit,
                       kAudioUnitProperty_StreamFormat,
                       kAudioUnitScope_Output,
                       InputElement,
                       &audioFormat,
                       sizeof(audioFormat));
  AudioUnitSetProperty(audioUnit,
                       kAudioUnitProperty_StreamFormat,
                       kAudioUnitScope_Input,
                       OutputElement,
                       &audioFormat,
                       sizeof(audioFormat));
  
  
  AudioStreamBasicDescription dstFormat;
  dstFormat.mSampleRate         = 44100.00;
  dstFormat.mFormatID           = kAudioFormatLinearPCM;
  dstFormat.mFormatFlags        = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
  dstFormat.mFramesPerPacket    = 1;
  dstFormat.mChannelsPerFrame   = 2;
  dstFormat.mBitsPerChannel     = 16;
  dstFormat.mBytesPerPacket     = 4;
  dstFormat.mBytesPerFrame      = 4;
  dstFormat.mReserved           = 0;
  
//  [[SimpleAudioFileHelper sharedInstance] InitAudioFileByFormat:&dstFormat FileName:@"inputData" AudioFile:mInputAudioFile];
//  [[SimpleAudioFileHelper sharedInstance] InitAudioFileByFormat:&dstFormat FileName:@"outputData" AudioFile:mOutputAudioFile];
}

- (void)initRecordeCallback {
  AURenderCallbackStruct recordCallback;
  recordCallback.inputProc = RecordCallback;
  recordCallback.inputProcRefCon = (__bridge void *)self;
  AudioUnitSetProperty(audioUnit,
                       kAudioOutputUnitProperty_SetInputCallback,
                       kAudioUnitScope_Global,
//                       kAudioUnitScope_Input,
                       InputElement,
//                       OutputElement,
                       &recordCallback,
                       sizeof(recordCallback));
}

- (void)initPlayCallback {
  AURenderCallbackStruct playCallback;
  playCallback.inputProc = PlayCallback;
  playCallback.inputProcRefCon = (__bridge void *)self;
  AudioUnitSetProperty(audioUnit,
                       kAudioUnitProperty_SetRenderCallback,
                       kAudioUnitScope_Global,
//                       kAudioUnitScope_Output,
                       OutputElement,
//                       InputElement,
                       &playCallback,
                       sizeof(playCallback));
  
}

- (void)initAudioProperty {
  UInt32 flag = 1;
  
  AudioUnitSetProperty(audioUnit,
                       kAudioOutputUnitProperty_EnableIO,
                       kAudioUnitScope_Input,
                       InputElement,
                       &flag,
                       sizeof(flag));
  AudioUnitSetProperty(audioUnit,
                       kAudioOutputUnitProperty_EnableIO,
                       kAudioUnitScope_Input,
                       OutputElement,
                       &flag,
                       sizeof(flag));
}

- (void)InitRemoteIO{
  AudioUnitInitialize(audioUnit);
  [self initAudioSession];
  
  [self initBuffer];
  
  [self initAudioComponent];
  
  [self initFormat];
  
  [self initAudioProperty];
  
  [self initRecordeCallback];
  
  [self initPlayCallback];

}

- (void)Start{
  OSStatus status = AudioOutputUnitStart(audioUnit);
  checkStatus(status);
}

- (void)Stop{
    OSStatus status;
  status = AudioOutputUnitStop(audioUnit);
  checkStatus(status);
  [self writeDataToFile];
}

- (void)Exit{
//  AudioOutputUnitStop(audioUnit);
  AudioUnitUninitialize(audioUnit);
  AudioComponentInstanceDispose(audioUnit);

  if (buffList != NULL) {
    free(buffList);
    buffList = NULL;
  }
  
}

static OSStatus RecordCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData) {
  
  AudioUnitRender(audioUnit, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, buffList);
  [inputData appendBytes:buffList->mBuffers[0].mData length:buffList->mBuffers[0].mDataByteSize/sizeof(short)];
  
  NSLog(@"RecordCallback size = %d inBusNumber:%d", buffList->mBuffers[0].mDataByteSize,inBusNumber);
  
  return noErr;
}

static OSStatus PlayCallback(void *inRefCon,
                                 AudioUnitRenderActionFlags *ioActionFlags,
                                 const AudioTimeStamp *inTimeStamp,
                                 UInt32 inBusNumber,
                                 UInt32 inNumberFrames,
                                 AudioBufferList *ioData) {

  NSLog(@"PlayCallback size2 = %d inBusNumber:%d, inNumberFrames:%d", buffList->mBuffers[0].mDataByteSize,inBusNumber,inNumberFrames);
//  memcpy(ioData->mBuffers[0].mData, buffList->mBuffers[0].mData, ioData->mBuffers[0].mDataByteSize);
  AudioUnitRender(audioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, buffList);
  
  [outputData appendBytes:buffList->mBuffers[0].mData length:buffList->mBuffers[0].mDataByteSize/sizeof(short)];
  return noErr;
}

Replies

Do you find the answer about the problem? can you share with me ? thank you !