Post not yet marked as solved
Post marked as unsolved with 1 reply, 339 views
// Stream synthesized speech into the audio engine: AVSpeechSynthesizer delivers
// audio in chunks via this callback; each chunk is converted to a format the
// engine accepts and scheduled for playback.
// NOTE(review): the callback escapes and captures `self` strongly — confirm the
// synthesizer's lifetime is tied to `self` so this cannot leak.
self.synthesizer.write(utterance) { buffer in
    DispatchQueue.main.async {
        // Skip non-PCM buffers and the zero-length marker buffers the
        // synthesizer emits (e.g. at the end of an utterance).
        guard let pcmBuffer = buffer as? AVAudioPCMBuffer, pcmBuffer.frameLength > 0 else {
            return
        }
        // Need to convert the buffer to a different format because AVAudioEngine
        // does not support the format returned from AVSpeechSynthesizer.
        // Bind `converter` once instead of checking `!= nil` and then optional-
        // chaining every use; a nil converter previously made `convert` a silent
        // no-op inside the `do` block.
        guard let converter = self.converter,
              let pcmFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                            sampleRate: pcmBuffer.format.sampleRate,
                                            channels: pcmBuffer.format.channelCount,
                                            interleaved: false),
              let convertedBuffer = AVAudioPCMBuffer(pcmFormat: pcmFormat,
                                                     frameCapacity: pcmBuffer.frameCapacity) else {
            return
        }
        do {
            try converter.convert(to: convertedBuffer, from: pcmBuffer)
            self.bufferCounter += 1
            self.scheduleNextBuffer(convertedBuffer)
            // Reset per chunk: each callback delivers an independent buffer, so
            // the converter must not carry state across chunks.
            converter.reset()
        } catch {
            self.handleTextToSpeechError()
        }
    }
}