AudioStreamManager.swift 6.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218
  1. /*
  2. * Copyright 2020, gRPC Authors All rights reserved.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. // NOTE: Implementation based off of Google's for Audio Streaming:
  17. // https://github.com/GoogleCloudPlatform/ios-docs-samples/blob/master/speech/Swift/Speech-gRPC-Streaming/Speech/AudioController.swift
  18. import AVFoundation
  19. import Foundation
/// Errors thrown while building the microphone capture pipeline.
enum AudioStreamError: Error {
/// An AudioUnit property set or initialize call returned a non-noErr status.
case failedToConfigure
/// `AudioComponentFindNext` found no RemoteIO component.
case failedToFindAudioComponent
/// `microphoneUnit` was nil when a configuration step needed it.
case failedToFindMicrophoneUnit
}
/// Receives captured audio from `AudioStreamManager`.
/// NOTE(review): this name shadows Foundation's `StreamDelegate`; references in
/// this file resolve to this protocol, but a rename would avoid ambiguity —
/// confirm with callers before changing, since it is part of the public surface.
protocol StreamDelegate: AnyObject {
/// Delivers raw 16-bit linear-PCM bytes; dispatched on the main queue
/// (see `recordingCallback`).
func processAudio(_ data: Data)
}
  28. class AudioStreamManager {
  29. var microphoneUnit: AudioComponentInstance?
  30. weak var delegate: StreamDelegate?
  31. static var shared = AudioStreamManager()
  32. // Type used for audio unit elements. Bus 1 is input scope, element 1.
  33. private let bus1: AudioUnitElement = 1
  34. deinit {
  35. if let microphoneUnit = microphoneUnit {
  36. AudioComponentInstanceDispose(microphoneUnit)
  37. }
  38. }
  39. func configure() throws {
  40. try self.configureAudioSession()
  41. var audioComponentDescription = self.describeComponent()
  42. guard let remoteIOComponent = AudioComponentFindNext(nil, &audioComponentDescription) else {
  43. throw AudioStreamError.failedToFindAudioComponent
  44. }
  45. AudioComponentInstanceNew(remoteIOComponent, &self.microphoneUnit)
  46. try self.configureMicrophoneForInput()
  47. try self.setFormatForMicrophone()
  48. try self.setCallback()
  49. if let microphoneUnit = self.microphoneUnit {
  50. let status = AudioUnitInitialize(microphoneUnit)
  51. if status != noErr {
  52. throw AudioStreamError.failedToConfigure
  53. }
  54. }
  55. }
  56. func start() {
  57. guard let microphoneUnit = self.microphoneUnit else { return }
  58. AudioOutputUnitStart(microphoneUnit)
  59. }
  60. func stop() {
  61. guard let microphoneUnit = self.microphoneUnit else { return }
  62. AudioOutputUnitStop(microphoneUnit)
  63. }
  64. private func configureAudioSession() throws {
  65. let session = AVAudioSession.sharedInstance()
  66. try session.setCategory(.record)
  67. try session.setPreferredIOBufferDuration(10)
  68. }
  69. private func describeComponent() -> AudioComponentDescription {
  70. var description = AudioComponentDescription()
  71. description.componentType = kAudioUnitType_Output
  72. description.componentSubType = kAudioUnitSubType_RemoteIO
  73. description.componentManufacturer = kAudioUnitManufacturer_Apple
  74. description.componentFlags = 0
  75. description.componentFlagsMask = 0
  76. return description
  77. }
  78. private func configureMicrophoneForInput() throws {
  79. guard let microphoneUnit = self.microphoneUnit else {
  80. throw AudioStreamError.failedToFindMicrophoneUnit
  81. }
  82. var oneFlag: UInt32 = 1
  83. let status = AudioUnitSetProperty(
  84. microphoneUnit,
  85. kAudioOutputUnitProperty_EnableIO,
  86. kAudioUnitScope_Input,
  87. self.bus1,
  88. &oneFlag,
  89. UInt32(MemoryLayout<UInt32>.size)
  90. )
  91. if status != noErr {
  92. throw AudioStreamError.failedToConfigure
  93. }
  94. }
  95. private func setFormatForMicrophone() throws {
  96. guard let microphoneUnit = self.microphoneUnit else {
  97. throw AudioStreamError.failedToFindMicrophoneUnit
  98. }
  99. /*
  100. Configure Audio format to match initial message sent
  101. over bidirectional stream. Config and below must match.
  102. */
  103. var asbd = AudioStreamBasicDescription()
  104. asbd.mSampleRate = Double(Constants.sampleRate)
  105. asbd.mFormatID = kAudioFormatLinearPCM
  106. asbd.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked
  107. asbd.mBytesPerPacket = 2
  108. asbd.mFramesPerPacket = 1
  109. asbd.mBytesPerFrame = 2
  110. asbd.mChannelsPerFrame = 1
  111. asbd.mBitsPerChannel = 16
  112. let status = AudioUnitSetProperty(
  113. microphoneUnit,
  114. kAudioUnitProperty_StreamFormat,
  115. kAudioUnitScope_Output,
  116. self.bus1,
  117. &asbd,
  118. UInt32(MemoryLayout<AudioStreamBasicDescription>.size)
  119. )
  120. if status != noErr {
  121. throw AudioStreamError.failedToConfigure
  122. }
  123. }
  124. private func setCallback() throws {
  125. guard let microphoneUnit = self.microphoneUnit else {
  126. throw AudioStreamError.failedToFindMicrophoneUnit
  127. }
  128. var callbackStruct = AURenderCallbackStruct()
  129. callbackStruct.inputProc = recordingCallback
  130. callbackStruct.inputProcRefCon = nil
  131. let status = AudioUnitSetProperty(
  132. microphoneUnit,
  133. kAudioOutputUnitProperty_SetInputCallback,
  134. kAudioUnitScope_Global,
  135. self.bus1,
  136. &callbackStruct,
  137. UInt32(MemoryLayout<AURenderCallbackStruct>.size)
  138. )
  139. if status != noErr {
  140. throw AudioStreamError.failedToConfigure
  141. }
  142. }
  143. }
  144. func recordingCallback(
  145. inRefCon: UnsafeMutableRawPointer,
  146. ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
  147. inTimeStamp: UnsafePointer<AudioTimeStamp>,
  148. inBusNumber: UInt32,
  149. inNumberFrames: UInt32,
  150. ioData: UnsafeMutablePointer<AudioBufferList>?
  151. ) -> OSStatus {
  152. var status = noErr
  153. let channelCount: UInt32 = 1
  154. var bufferList = AudioBufferList()
  155. bufferList.mNumberBuffers = channelCount
  156. let buffers = UnsafeMutableBufferPointer<AudioBuffer>(
  157. start: &bufferList.mBuffers,
  158. count: Int(bufferList.mNumberBuffers)
  159. )
  160. buffers[0].mNumberChannels = 1
  161. buffers[0].mDataByteSize = inNumberFrames * 2
  162. buffers[0].mData = nil
  163. // get the recorded samples
  164. guard let remoteIOUnit = AudioStreamManager.shared.microphoneUnit else { fatalError() }
  165. status = AudioUnitRender(
  166. remoteIOUnit,
  167. ioActionFlags,
  168. inTimeStamp,
  169. inBusNumber,
  170. inNumberFrames,
  171. UnsafeMutablePointer<AudioBufferList>(&bufferList)
  172. )
  173. if status != noErr {
  174. return status
  175. }
  176. guard let bytes = buffers[0].mData else {
  177. fatalError("Unable to find pointer to the buffer audio data")
  178. }
  179. let data = Data(bytes: bytes, count: Int(buffers[0].mDataByteSize))
  180. DispatchQueue.main.async {
  181. AudioStreamManager.shared.delegate?.processAudio(data)
  182. }
  183. return noErr
  184. }