UNIT

private func setupAudioUnit() {
    // Describe the RemoteIO unit (simultaneous input/output on iOS).
    var desc = AudioComponentDescription()
    desc.componentType = kAudioUnitType_Output
    desc.componentSubType = kAudioUnitSubType_RemoteIO
    desc.componentFlags = 0
    desc.componentFlagsMask = 0
    desc.componentManufacturer = kAudioUnitManufacturer_Apple

    var status: OSStatus = noErr
    let inputComponent: AudioComponent = AudioComponentFindNext(nil, &desc)!

    var tempAudioUnit: AudioUnit?
    status = AudioComponentInstanceNew(inputComponent, &tempAudioUnit)
    self.audioUnit = tempAudioUnit
    guard let au = self.audioUnit else { return }

    // Enable recording on the input bus; RemoteIO has input disabled by default.
    // kInputBus (1, the microphone side) and kOutputBus (0, the speaker side)
    // are assumed to be defined elsewhere in this class.
    var one: UInt32 = 1
    status = AudioUnitSetProperty(au,
                                  kAudioOutputUnitProperty_EnableIO,
                                  kAudioUnitScope_Input,
                                  kInputBus,
                                  &one,
                                  UInt32(MemoryLayout<UInt32>.size))

    // Set format to 32-bit floats, linear PCM.
    let numChannels = 2 // 2-channel stereo
    var audioFormat = AudioStreamBasicDescription()
    audioFormat.mSampleRate = 44100 // a real hardware rate; Core Audio rejects nonsense rates
    audioFormat.mFormatID = kAudioFormatLinearPCM
    audioFormat.mFormatFlags = kAudioFormatFlagsNativeFloatPacked
    audioFormat.mFramesPerPacket = 1
    audioFormat.mChannelsPerFrame = UInt32(numChannels)
    audioFormat.mBitsPerChannel = UInt32(8 * MemoryLayout<Float>.size)
    audioFormat.mBytesPerPacket = UInt32(numChannels * MemoryLayout<Float>.size)
    audioFormat.mBytesPerFrame = UInt32(numChannels * MemoryLayout<Float>.size)
    audioFormat.mReserved = 0

    // Apply the format to both sides of the unit. Note that the size argument
    // must be the size of the AudioStreamBasicDescription, not of a UInt32.
    status = AudioUnitSetProperty(au,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Input,
                                  kOutputBus,
                                  &audioFormat,
                                  UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
    status = AudioUnitSetProperty(au,
                                  kAudioUnitProperty_StreamFormat,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &audioFormat,
                                  UInt32(MemoryLayout<AudioStreamBasicDescription>.size))

    // Set input/recording callback. The refCon smuggles `self` into the
    // C callback without retaining it.
    var inputCallbackStruct = AURenderCallbackStruct(
        inputProc: recordingCallback,
        inputProcRefCon: UnsafeMutableRawPointer(Unmanaged.passUnretained(self).toOpaque()))
    status = AudioUnitSetProperty(au,
                                  kAudioOutputUnitProperty_SetInputCallback,
                                  kAudioUnitScope_Global,
                                  kInputBus,
                                  &inputCallbackStruct,
                                  UInt32(MemoryLayout<AURenderCallbackStruct>.size))

    // Ask Core Audio to allocate buffers for us on render.
    status = AudioUnitSetProperty(au,
                                  kAudioUnitProperty_ShouldAllocateBuffer,
                                  kAudioUnitScope_Output,
                                  kInputBus,
                                  &one,
                                  UInt32(MemoryLayout<UInt32>.size))

    flag = Int(status)
}

let recordingCallback: AURenderCallback = { (
    inRefCon,
    ioActionFlags,
    inTimeStamp,
    inBusNumber,
    frameCount,
    ioData) -> OSStatus in

    // Recover `self` from the refCon passed in setupAudioUnit().
    let audioObject = unsafeBitCast(inRefCon, to: AudioUnitListener.self)
    var err: OSStatus = noErr

    // Leave mData nil so AudioUnitRender() allocates the buffers for us
    // (kAudioUnitProperty_ShouldAllocateBuffer was enabled above).
    var bufferList = AudioBufferList(
        mNumberBuffers: 1,
        mBuffers: AudioBuffer(
            mNumberChannels: UInt32(2),
            mDataByteSize: 16,
            mData: nil))

    // Pull the captured samples from the input bus into bufferList.
    if let au = audioObject.audioUnit {
        err = AudioUnitRender(au,