Hello,
I have some trouble using the MultiRoute category in iOS 10, while it was working well in iOS 9. I couldn't find exactly what the problem is, but I will show you a list of symptoms. (For my tests, I used an iPad Air 1 and an iPad Pro, both on iOS 10. None of the issues described below occur on iOS 9.)
- Plugging and unplugging headphones and/or a USB sound card to my iPad MAY (it's quite random actually) switch the AudioSession category to AVAudioSessionCategorySoloAmbient (while I was in multiRoute).
- If I'm not yet in AVAudioSessionCategorySoloAmbient: when I plug a USB sound card AND headphones into my iPad, the property outputNumberOfChannels does not count all the channels available (my guess is that it doesn't count the headphone channels), but maximumOutputNumberOfChannels is the actual total number of channels (headphones + USB sound card). Thus I can't set the channelMap property correctly, and I can't play any sound on the headphones. However, the current route information shows both output ports (USB and headphones) with the right number of channels.
- Only on the iPad Pro: I can't get sound playing on the iPad's native speakers while in MultiRoute. At all.
Below is the sample code I used to reproduce these problems. Using the same sample code on iOS 9 works just fine.
Thank you !
Fesongs
SAMPLE CODE
#import "ViewController.h"
@import AudioToolbox;
@import Accelerate;
@import AVFoundation;
// Context handed to the real-time render callback via inputProcRefCon,
// so the callback can reach the I/O unit without touching Objective-C state.
typedef struct soundStructArrayType {
AudioUnit ioUnit; // RemoteIO unit to pull captured input from
AudioStreamBasicDescription hwsf; // client stream format (set after graph init)
int index; // NOTE(review): only ever set to 0 in viewDidLoad — apparently unused, confirm
}soundStructArrayType;
@interface ViewController ()
// Render-callback context; a plain C struct, held by value (assign).
@property (nonatomic, assign) soundStructArrayType soundStructArray;
// The RemoteIO audio unit owned by the AUGraph built in viewDidLoad.
@property (nonatomic, assign) AudioUnit ioUnit;
// Current audio session category. `copy` (not `strong`) for NSString
// properties: protects against mutable subclasses being handed in.
@property (nonatomic, copy) NSString* category;
//Display AudioSession information on iPad
@property (weak, nonatomic) IBOutlet UITextView *logTextView;
// Accumulated log text; appended on the main queue only (see audioSessionLog:).
@property (nonatomic, strong) NSMutableString *logHistory;
// Dirty flag polled by the 50 ms logTimer to coalesce UI refreshes.
@property (nonatomic, assign) BOOL logNeedupdate;
@property (nonatomic, strong) NSTimer *logTimer;
@end
@implementation ViewController
// Real-time render callback attached to the RemoteIO output element (bus 0).
// Pulls the captured audio from the input element (bus 1) into ioData, then
// duplicates channel 0 into channel 1 (the format is non-interleaved stereo,
// so each channel lives in its own buffer).
// Runs on the real-time audio thread: no locks, no Objective-C, no allocation.
static OSStatus renderCallback (
                                void *inRefCon,
                                AudioUnitRenderActionFlags *ioActionFlags,
                                const AudioTimeStamp *inTimeStamp,
                                UInt32 inBusNumber,
                                UInt32 inNumberFrames,
                                AudioBufferList *ioData
                                )
{
    soundStructArrayType *context = (soundStructArrayType *)inRefCon;
    // BUG FIX: the render status was previously ignored; on failure the
    // buffers may contain garbage, so output silence and report the error.
    OSStatus status = AudioUnitRender(context->ioUnit, ioActionFlags, inTimeStamp, 1, inNumberFrames, ioData);
    if (status != noErr) {
        for (UInt32 i = 0; i < ioData->mNumberBuffers; ++i) {
            memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
        }
        return status;
    }
    // BUG FIX: guard the buffer count — on a mono route mBuffers[1] does not
    // exist and the unchecked memcpy would write out of bounds.
    if (ioData->mNumberBuffers >= 2) {
        memcpy(ioData->mBuffers[1].mData, ioData->mBuffers[0].mData, ioData->mBuffers[0].mDataByteSize);
    }
    return noErr;
}
- (void)viewDidLoad {
[super viewDidLoad];
// Builds the whole audio pipeline up front: configure the audio session,
// create an AUGraph with a single RemoteIO unit, attach the render callback,
// start the graph, then wire up the on-screen logging and channel mapping.
Float32 sampleRate = 44100.;
// StreamFormat: 32-bit float, packed, NON-interleaved linear PCM, stereo.
UInt32 bytesPerSample = sizeof(float);
AudioStreamBasicDescription hardwareFormat = {0};
hardwareFormat.mFormatID = kAudioFormatLinearPCM;
hardwareFormat.mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagsNativeEndian;
hardwareFormat.mBitsPerChannel = 8 * bytesPerSample;
hardwareFormat.mFramesPerPacket = 1;
hardwareFormat.mChannelsPerFrame = 2;
// Non-interleaved: each AudioBuffer holds ONE channel, so bytes-per-packet
// and bytes-per-frame cover a single sample, not channels * sample.
hardwareFormat.mBytesPerPacket = bytesPerSample;
hardwareFormat.mBytesPerFrame = bytesPerSample;
hardwareFormat.mSampleRate = sampleRate;
NSError *error = nil;
OSStatus result = noErr;
//Obtain a reference to the singleton audio session object for your application.
AVAudioSession* mySession = [AVAudioSession sharedInstance];
//Request a hardware sample rate. The system may or may not be able to grant the request, depending on other audio activity on the device.
[mySession setPreferredSampleRate:sampleRate error:&error];
// Request the MultiRoute category so output can span several ports at once.
// NOTE(review): none of the NSError/OSStatus results in this method are
// checked — failures here would explain silent misbehavior later.
self.category = AVAudioSessionCategoryMultiRoute;
[mySession setCategory:self.category error:&error];
//Request activation of your audio session.
[mySession setActive:YES error:&error];
Float32 ioBufferDuration = 0.005;
_soundStructArray.index = 0;
[mySession setPreferredIOBufferDuration:ioBufferDuration error:&error];
//Specify the AudioUnits you want: a single Apple RemoteIO unit.
AudioComponentDescription ioUnitDescription;
ioUnitDescription.componentType = kAudioUnitType_Output;
ioUnitDescription.componentSubType = kAudioUnitSubType_RemoteIO;
ioUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
ioUnitDescription.componentFlags = 0;
ioUnitDescription.componentFlagsMask = 0;
//Building an Audio Processing graph that holds the single I/O node.
AUGraph processingGraph;
NewAUGraph(&processingGraph);
AUNode ioNode;
AUGraphAddNode(processingGraph, &ioUnitDescription, &ioNode);
//Open Graph to instantiate the audio units (they are not initialized yet).
AUGraphOpen(processingGraph);
AUGraphNodeInfo(processingGraph, ioNode, NULL, &(_ioUnit));
//Now, _ioUnit holds a reference to the audio unit instance in the graph.
//Configure the AudioUnits. Output (element 0) is enabled by default; input
//(element 1) must be enabled explicitly for AudioUnitRender to capture.
UInt32 one = 1;
UInt32 maxFrPrSl = 2048;
AudioUnitSetProperty(_ioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &one, sizeof(one));
AudioUnitSetProperty(_ioUnit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &maxFrPrSl, sizeof(maxFrPrSl));
//Attaching a render callback in a thread-safe manner (the AUGraph API defers
//the change to a safe point in the render cycle).
AURenderCallbackStruct callbackStruct;
_soundStructArray.ioUnit = _ioUnit;
callbackStruct.inputProc = &renderCallback;
// NOTE(review): this points at ivar storage inside self — valid only while
// this view controller outlives the running graph.
callbackStruct.inputProcRefCon = &_soundStructArray;
AUGraphSetNodeInputCallback(processingGraph, ioNode, 0, &callbackStruct);
// Apply the client format on both sides of the RemoteIO unit: input scope of
// the output element (0) and output scope of the input element (1).
result = AudioUnitSetProperty(_ioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &hardwareFormat, sizeof(hardwareFormat));
AudioUnitSetProperty(_ioUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &hardwareFormat, sizeof(hardwareFormat));
AUGraphInitialize(processingGraph);
_soundStructArray.hwsf = hardwareFormat;
AUGraphStart(processingGraph);
CAShow(processingGraph);
// On-screen logging: a 50 ms timer polls logNeedupdate and refreshes the view.
self.logHistory = [[NSMutableString alloc] init];
self.logTimer = [NSTimer scheduledTimerWithTimeInterval:0.05 target:self selector:@selector(updateLog) userInfo:nil repeats:YES];
[self registerForNotifications:mySession];
[self displayAudioSessionInformations];
[self multirouteAudioMappingOnAllAvailableOutput];
}
// Subscribes to route-change notifications posted by the given session, so
// handleRouteChange: can re-apply the channel map whenever hardware changes.
- (void)registerForNotifications:(AVAudioSession*)sessionInstance {
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(handleRouteChange:)
                                             name:AVAudioSessionRouteChangeNotification
                                           object:sessionInstance];
}
// Route-change handler: logs a category change (the reported iOS 10 symptom
// is a spontaneous switch to SoloAmbient) and rebuilds the channel map, since
// plugging/unplugging devices changes the output channel count.
- (void)handleRouteChange:(NSNotification*)notification {
AVAudioSession *sessionInstance = [AVAudioSession sharedInstance];
// BUG FIX: compare string CONTENTS, not pointers. `!=` on NSString only
// detects identity; the session may return a distinct instance for an
// unchanged category (and would then log a spurious change — or miss one).
if (![self.category isEqualToString:[sessionInstance category]]) {
NSString *message = [NSString stringWithFormat:@"AUDIO SESSION CATEGORY CHANGED. New AudioSession Category : %@", [sessionInstance category]];
[self audioSessionLog:message];
self.category = [sessionInstance category];
}
[self multirouteAudioMappingOnAllAvailableOutput];
[self displayAudioSessionInformations];
}
// Builds a channel map that mirrors the stereo render output across every
// available output channel: even hardware channels get source channel 0,
// odd ones get source channel 1.
- (void)multirouteAudioMappingOnAllAvailableOutput {
AVAudioSession *sessionInstance = [AVAudioSession sharedInstance];
UInt32 outputNbChannels = (UInt32)sessionInstance.outputNumberOfChannels;
if (outputNbChannels != 0) {
UInt32 *outputChannelMap = (UInt32 *)calloc(outputNbChannels, sizeof(UInt32));
if (outputChannelMap == NULL) return; // allocation failure: nothing to map
//Mapping input on all outputs available (alternate left/right).
for (UInt32 i = 0; i < outputNbChannels; ++i) { // UInt32 index: avoids signed/unsigned comparison
outputChannelMap[i] = i % 2;
}
OSStatus result = AudioUnitSetProperty(_ioUnit, kAudioOutputUnitProperty_ChannelMap, kAudioUnitScope_Output, 0, outputChannelMap, outputNbChannels*sizeof(UInt32));
if (result) [self audioSessionLog:@"Error in setting output channel map"];
// BUG FIX: the map buffer was leaked on every route change.
free(outputChannelMap);
}
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated. Nothing is released here:
// this is the unmodified Xcode template implementation.
}
/////////////////// Display information
// Timer callback (every 50 ms): if new log text is pending, pushes logHistory
// into the text view on the main queue and scrolls to the end. The dirty-flag
// check coalesces bursts of log lines into a single UI refresh.
- (void)updateLog {
if (!self.logNeedupdate) return;
dispatch_async(dispatch_get_main_queue(), ^{
self.logTextView.scrollEnabled = NO; // toggling scroll forces a clean relayout before scrolling
self.logTextView.text = self.logHistory;
self.logTextView.scrollEnabled = YES;
// BUG FIX: NSMakeRange(length - 1, 1) underflows (NSUInteger wraps to a huge
// value) when the text is empty — only scroll when there is text.
NSUInteger length = self.logTextView.text.length;
if (length > 0) {
[self.logTextView scrollRangeToVisible:NSMakeRange(length - 1, 1)];
}
self.logNeedupdate = NO;
});
}
// Appends one line to the log history. All mutation happens on the main queue
// so logHistory is never touched concurrently with updateLog's read.
- (void)audioSessionLog:(NSString*)message {
// BUG FIX: -appendString: raises NSInvalidArgumentException on nil; guard
// against nil-valued format results (e.g. a nil port description field).
if (!message) return;
dispatch_async(dispatch_get_main_queue(), ^{
[self.logHistory appendString:@"\n"];
[self.logHistory appendString:message];
self.logNeedupdate = YES;
});
}
#pragma mark - Audio session logging helpers

// Logs the four descriptive fields of one data source, preceded by `header`
// (a section title such as @" INPUT DATASOURCE 0"); header may be nil.
- (void)logDataSource:(AVAudioSessionDataSourceDescription *)source header:(NSString *)header {
    if (header) [self audioSessionLog:header];
    [self audioSessionLog:[NSString stringWithFormat:@" dataSourceID : %@", source.dataSourceID]];
    [self audioSessionLog:[NSString stringWithFormat:@" dataSourceName : %@", source.dataSourceName]];
    [self audioSessionLog:[NSString stringWithFormat:@" location : %@", source.location]];
    [self audioSessionLog:[NSString stringWithFormat:@" orientation : %@", source.orientation]];
}

// Logs a port's identity, all of its channels, and all of its data sources.
// `label` is the section name (@"INPUT PORT" / @"OUTPUT PORT"). When
// `includeSelection` is YES (current-route ports), the port's selected data
// source is logged as well.
- (void)logPort:(AVAudioSessionPortDescription *)portDesc label:(NSString *)label index:(NSUInteger)index includeSelection:(BOOL)includeSelection {
    [self audioSessionLog:[NSString stringWithFormat:@" %@ %lu", label, (unsigned long)index]];
    [self audioSessionLog:[NSString stringWithFormat:@" portName : %@", portDesc.portName]];
    [self audioSessionLog:[NSString stringWithFormat:@" portType : %@", portDesc.portType]];
    [self audioSessionLog:[NSString stringWithFormat:@" UID : %@", portDesc.UID]];
    NSArray *channelsArray = portDesc.channels;
    [self audioSessionLog:[NSString stringWithFormat:@" ***** Number of port channels : %lu", (unsigned long)channelsArray.count]];
    for (NSUInteger j = 0; j < channelsArray.count; j++) {
        AVAudioSessionChannelDescription *channel = channelsArray[j];
        [self audioSessionLog:[NSString stringWithFormat:@" CHANNEL %lu", (unsigned long)j]];
        [self audioSessionLog:[NSString stringWithFormat:@" channelName : %@", channel.channelName]];
        [self audioSessionLog:[NSString stringWithFormat:@" channelNumber : %lu", (unsigned long)channel.channelNumber]];
        [self audioSessionLog:[NSString stringWithFormat:@" owningPortUID : %@", channel.owningPortUID]];
        [self audioSessionLog:[NSString stringWithFormat:@" channelLabel : %d", (unsigned int)channel.channelLabel]];
    }
    NSArray *portSources = portDesc.dataSources;
    [self audioSessionLog:[NSString stringWithFormat:@" ***** Number of port sources : %lu", (unsigned long)portSources.count]];
    for (NSUInteger j = 0; j < portSources.count; j++) {
        [self logDataSource:portSources[j] header:[NSString stringWithFormat:@" PORT DATASOURCE %lu", (unsigned long)j]];
    }
    if (includeSelection) {
        [self logDataSource:[portDesc selectedDataSource] header:@" ***** PORT SELECTED DATASOURCE"];
    }
}

// Dumps the complete state of the shared audio session to the on-screen log:
// category/mode/sample-rate scalars, available inputs, preferred input,
// input/output data sources, and the full current route with channel maps.
// Refactored from ~260 lines of copy-paste into the two helpers above.
- (void)displayAudioSessionInformations {
    AVAudioSession *sessionInstance = [AVAudioSession sharedInstance];
    [self audioSessionLog:@"\n*************************************************************************************************************************************************************"];
    [self audioSessionLog:@"Audio Session Information :\n"];
    // Session-level scalars.
    [self audioSessionLog:[NSString stringWithFormat:@"AudioSession category : %@", [sessionInstance category]]];
    [self audioSessionLog:[NSString stringWithFormat:@"AudioSession mode : %@", [sessionInstance mode]]];
    [self audioSessionLog:[NSString stringWithFormat:@"AudioSession preffered sample rate : %f, sample rate : %f", sessionInstance.preferredSampleRate, sessionInstance.sampleRate]];
    [self audioSessionLog:[NSString stringWithFormat:@"OutputVolume : %f, InputGain : %f, isInputGainSettable : %d", [sessionInstance outputVolume], [sessionInstance inputGain], (int)[sessionInstance isInputGainSettable]]];
    [self audioSessionLog:[NSString stringWithFormat:@"IOBufferDutation : %f, InputLatency : %f, OutputLatency : %f", [sessionInstance IOBufferDuration], [sessionInstance inputLatency], [sessionInstance outputLatency]]];
    [self audioSessionLog:[NSString stringWithFormat:@"Num Channels Input : %d, max : %d, prefered : %d", (int)[sessionInstance inputNumberOfChannels], (int)[sessionInstance maximumInputNumberOfChannels], (int)[sessionInstance preferredInputNumberOfChannels]]];
    [self audioSessionLog:[NSString stringWithFormat:@"Num Channels Output : %d, max : %d, prefered : %d", (int)[sessionInstance outputNumberOfChannels], (int)[sessionInstance maximumOutputNumberOfChannels], (int)[sessionInstance preferredOutputNumberOfChannels]]];
    [self audioSessionLog:@" "];
    // Available input ports (no selected-data-source line in this section).
    NSArray *inputsArray = [sessionInstance availableInputs];
    [self audioSessionLog:[NSString stringWithFormat:@"Is input available? : %d", [sessionInstance isInputAvailable]]];
    [self audioSessionLog:[NSString stringWithFormat:@"***** Number of input ports : %lu", (unsigned long)inputsArray.count]];
    for (NSUInteger i = 0; i < inputsArray.count; i++) {
        [self logPort:inputsArray[i] label:@"INPUT PORT" index:i includeSelection:NO];
    }
    [self audioSessionLog:@" "];
    // Preferred input port (identity only).
    AVAudioSessionPortDescription *preferredInput = [sessionInstance preferredInput];
    [self audioSessionLog:@"***** Prefered input port"];
    [self audioSessionLog:[NSString stringWithFormat:@" portName : %@", preferredInput.portName]];
    [self audioSessionLog:[NSString stringWithFormat:@" portType : %@", preferredInput.portType]];
    [self audioSessionLog:[NSString stringWithFormat:@" UID : %@", preferredInput.UID]];
    [self audioSessionLog:@" "];
    // Input data sources and the currently selected one.
    NSArray *inputSourcesArray = [sessionInstance inputDataSources];
    [self audioSessionLog:[NSString stringWithFormat:@"***** Number of input datasources : %lu", (unsigned long)inputSourcesArray.count]];
    for (NSUInteger i = 0; i < inputSourcesArray.count; i++) {
        [self logDataSource:inputSourcesArray[i] header:[NSString stringWithFormat:@" INPUT DATASOURCE %lu", (unsigned long)i]];
    }
    [self logDataSource:[sessionInstance inputDataSource] header:@"***** Selected Input Datasource"];
    [self audioSessionLog:@" "];
    // Output data sources and the currently selected one.
    NSArray *outputSourcesArray = [sessionInstance outputDataSources];
    [self audioSessionLog:[NSString stringWithFormat:@"***** Number of output sources : %lu", (unsigned long)outputSourcesArray.count]];
    for (NSUInteger i = 0; i < outputSourcesArray.count; i++) {
        [self logDataSource:outputSourcesArray[i] header:[NSString stringWithFormat:@" DATASOURCE %lu", (unsigned long)i]];
    }
    [self logDataSource:[sessionInstance outputDataSource] header:@"***** Selected Output Datasource"];
    [self audioSessionLog:@"\n"];
    // Current route: full port dumps, including each port's selected source.
    AVAudioSessionRouteDescription *route = [sessionInstance currentRoute];
    [self audioSessionLog:@"********** CURRENT ROUTE **********"];
    [self audioSessionLog:[NSString stringWithFormat:@"***** Number of input ports : %lu", (unsigned long)route.inputs.count]];
    for (NSUInteger i = 0; i < route.inputs.count; i++) {
        [self logPort:route.inputs[i] label:@"INPUT PORT" index:i includeSelection:YES];
    }
    [self audioSessionLog:[NSString stringWithFormat:@"***** Number of output ports : %lu", (unsigned long)route.outputs.count]];
    for (NSUInteger i = 0; i < route.outputs.count; i++) {
        [self logPort:route.outputs[i] label:@"OUTPUT PORT" index:i includeSelection:YES];
    }
    [self audioSessionLog:@"********** END CURRENT ROUTE **********"];
    [self audioSessionLog:@"\n*************************************************************************************************************************************************************\n"];
}