@chenlai89
2018-05-30T03:08:45.000000Z
字数 181223
阅读 1075
kindle
/** Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.** Use of this source code is governed by a BSD-style license* that can be found in the LICENSE file in the root of the source* tree. An additional intellectual property rights grant can be found* in the file PATENTS. All contributing project authors may* be found in the AUTHORS file in the root of the source tree.*/#include "audio_device_utility.h"#include "audio_device_mac.h"#include "audio_device_config.h"#include "event_wrapper.h"#include "trace.h"#include "thread_wrapper.h"#include <cassert>#include <sys/sysctl.h> // sysctlbyname()#include <mach/mach.h> // mach_task_self()#include <libkern/OSAtomic.h> // OSAtomicCompareAndSwap()#include "portaudio/pa_ringbuffer.h"#include <CoreServices/CoreServices.h>#include <mach/mach_time.h>#include <IOKit/IOKitLib.h>#include <IOKit/usb/IOUSBLib.h>#include <IOKit/IOCFPlugIn.h>//#define TRACKDEVICEDELAY//#define DEVICE_THREAD_EXCEPTION //1. add a protection for the case: sometimes stop device but callback thread still run; 2. attation: global variablenamespace webrtc{#define __MAC_OS_X_VERSION_MAX_ALLOWED 1050#define WEBRTC_CA_RETURN_ON_ERR(expr) \do { \err = expr; \if (err != noErr) { \logCAMsg(kTraceError, kTraceAudioDevice, _id, \"Error in " #expr, (const char *)&err); \return -1; \} \} while(0)#define WEBRTC_CA_LOG_ERR(expr) \do { \err = expr; \if (err != noErr) { \logCAMsg(kTraceError, kTraceAudioDevice, _id, \"Error in " #expr, (const char *)&err); \} \} while(0)#define WEBRTC_CA_LOG_WARN(expr) \do { \err = expr; \if (err != noErr) { \logCAMsg(kTraceWarning, kTraceAudioDevice, _id, \"Error in " #expr, (const char *)&err); \} \} while(0)enum{MaxNumberDevices = 64};enum{MaxNoCallbacktime = 30000 //ms};typedef struct{int8_t *p_name; //device name in utf8uint32_t len_of_name; //the length of device nameint8_t *p_unique_id; //this id is unique for different device in utf8uint32_t len_of_unique_id; //length of unique idbool bCombo; /// if a compound deviceint num_of_devices; /// how many devices are compounded} SSB_MC_DATA_BLOCK_AUDIO_DEVICE_DES, *PSSB_MC_DATA_BLOCK_AUDIO_DEVICE_DES;#ifdef BUILD_FOR_BBMstatic const char* ZoomAudioDeviceName = "BBMAudioDevice";static const char* ZoomAudioDeviceName2 = "BBM";#elsestatic const char* ZoomAudioDeviceName = "ZoomAudioDevice";static const char* ZoomAudioDeviceName2 = "Zoom-";#endif#ifdef BUILD_FOR_MIMOstatic const char* BlackmagicAudioName = "Blackmagic";static const char* MagewellAudioName = "XI100DUSB-HDMI";#endif#ifdef DEVICE_THREAD_EXCEPTIONstruct RunDeviceInfo{AudioDeviceID DeviceID;AudioDeviceIOProcID DeviceIOProcID;int errorCount;bool Stopped;};RunDeviceInfo RunMicrophoneInfo;RunDeviceInfo RunSpeakerInfo;#endifstatic bool MacOSMountainLionOrUpper(){UInt32 version;return ((Gestalt(gestaltSystemVersion, (SInt32*) &version) == noErr) && (version >= 0x1080));}void AudioDeviceMac::AtomicSet32(int32_t* theValue, int32_t newValue){while (1){int32_t oldValue = *theValue;if (OSAtomicCompareAndSwap32Barrier(oldValue, newValue, theValue)== true){return;}}}int32_t AudioDeviceMac::AtomicGet32(int32_t* theValue){while (1){WebRtc_Word32 value = *theValue;if (OSAtomicCompareAndSwap32Barrier(value, value, theValue) == true){return value;}}}// CoreAudio errors are best interpreted as four character strings.void AudioDeviceMac::logCAMsg(const TraceLevel level,const TraceModule module,const WebRtc_Word32 id, const char *msg,const char *err){assert(msg != NULL);assert(err != NULL);#ifdef WEBRTC_BIG_ENDIANWEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);#else// We need to flip the characters in this case.WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err+ 2, err + 1, err);#endif}AudioDeviceMac::AudioDeviceMac(const WebRtc_Word32 id) :_ptrAudioBuffer(NULL),_critSect(*CriticalSectionWrapper::CreateCriticalSection()),_critSectCb(*CriticalSectionWrapper::CreateCriticalSection()),_critSectPlayFormatChange(*CriticalSectionWrapper::CreateCriticalSection()),_critSectNotify(*CriticalSectionWrapper::CreateCriticalSection()),_critSectFormatChange(NULL),_stopEventRec(*EventWrapper::Create()),_stopEvent(*EventWrapper::Create()),_captureWorkerThread(NULL),_renderWorkerThread(NULL),_captureWorkerThreadId(0),_renderWorkerThreadId(0),_id(id),_mixerManager(id),_inputDeviceIndex(0),_inputDeviceIndexUI(0),_usingDeviceType(false),_outputDeviceIndex(0),_inputDeviceID(kAudioObjectUnknown),_outputDeviceID(kAudioObjectUnknown),_inputDeviceIsSpecified(false),_outputDeviceIsSpecified(false),_recChannels(N_REC_CHANNELS),_playChannels(N_PLAY_CHANNELS),_captureBufData(NULL),_renderBufData(NULL),_playBufType(AudioDeviceModule::kFixedBufferSize),_initialized(false),_isShutDown(false),_recording(false),_playing(false),_recIsInitialized(false),_playIsInitialized(false),_startRec(false),_stopRec(false),_stopPlay(false),_AGC(false),_renderDeviceIsAlive(1),_captureDeviceIsAlive(1),_doStop(false),_doStopRec(false),_macBookPro(false),_macBookProPanRight(false),_captureLatencyUs(0),_renderLatencyUs(0),_captureDelayUs(0),_renderDelayUs(0),_captureDelayUsUpdate(0),_captureDelayUsPrevious(0),_renderDelayOffsetSamples(0),_playBufDelayFixed(20),_playWarning(0),_playError(0),_recWarning(0),_recError(0),_loopbackrecError(0),_paCaptureBuffer(NULL),_paRenderBuffer(NULL),_captureBufSizeSamples(0),_renderBufSizeSamples(0),_pDeviceChangeNotify(0),#ifdef CHECKTIMESTAMPERROR_timestampErrorCount(0),_bCheckTimestampError(true),#endif//loopback record_loopbackDeviceIsInitialized(false),_usingLoopbackDeviceIndex(0),_loopbackDeviceIndex(0),_zoomDeviceSpeakerIndex(0),_zoomDeviceMicIndex(0),_loopbackDeviceIsSpecified(false),_loopbackRecording(false),_zoomDeviceSpeakerID(kAudioObjectUnknown),_zoomDeviceMicID(kAudioObjectUnknown),_SystemDefaultSpeakerID(kAudioObjectUnknown),_loopbackRecIsInitialized(false),_loopbackCaptureBufData(NULL),_paLoopbackCaptureBuffer(NULL),_loopbackCaptureDeviceIsAlive(1),_doStopLoopbackRec(false),_loopbackRecChannels(2),_loopbackCaptureBufDataReadIndex(0),_loopbackCaptureBufDataWriteIndex(0),_loopbackCaptureAvailbaleBufData(0),#ifdef BUILD_FOR_MIMO_LoopBackDeviceSource(kDefaultLoopbackSource),_loopbackCaptureBufDataBM(NULL),#endif_stopEventLoopbackRec(*EventWrapper::Create()),_loopbackCaptureWorkerThread(NULL),_loopbackCaptureWorkerThreadId(0),_loopbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),_zoomDeviceBufferCritSect(*CriticalSectionWrapper::CreateCriticalSection()),_bAudioShareStatus(false),_need_detect(true),_need_detect_play(true),//loopback record_recSameDevice(true),_recWaitErrorCount(0),_playWaitErrorCount(0),_capConvertFailCount(0),_playConvertFailCount(0),_bBuiltinMic(false),_bBuiltinSpk(false),_bHDMISpk(false),_bBlueSpk(false),_bBlueMic(false),_outputTargetLevelDB(0),_optVolDB(0),_enableSpkVolumeCheck(false),_speakerVolumeDB(0),_spkVolumeCheckFreq(0),_playCallbackHappened(false),_stopEventRecAgain(*EventWrapper::Create()),_stopEventAgain(*EventWrapper::Create()),_MicrophoneStartTime(0),_SpeakerStartTime(0),_recordCallbackHappened(false),_loopbackLocalSpeakerPlay(true),_bMicrophoneDefaultStreamFormatChanged(false),_bUseExclusiveMode(false),_bDefaultSpeakerIsMuted(false),_InputTimeNs(0),_NowTimeNs(0),_recDataInputTimeNs(0),_msecOnRecordSide(0),#ifdef MUTI_MICROPHONE_SUPPORT_pTmpRecordBuffer(NULL),_tmpRecordBufferSize(0),_bMutilChannelsMic(false),#endif_msecOnPlaySide(0){WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,"%s created", __FUNCTION__);assert(&_stopEvent != NULL);assert(&_stopEventRec != NULL);memset(_renderConvertData, 0, sizeof(_renderConvertData));memset(_inputDevName,0,sizeof(_inputDevName));memset(_outputDevName,0,sizeof(_outputDevName));memset(_inputDevGuid,0,sizeof(_inputDevGuid));memset(_outputDevGuid,0,sizeof(_outputDevGuid));memset(&_outStreamFormat, 0, sizeof(AudioStreamBasicDescription));memset(&_outDesiredFormat, 0, sizeof(AudioStreamBasicDescription));memset(&_inStreamFormat, 0, sizeof(AudioStreamBasicDescription));memset(&_inDesiredFormat, 0, sizeof(AudioStreamBasicDescription));memset(&_loopbackStreamFormat, 0, sizeof(AudioStreamBasicDescription));memset(&_loopbackDesiredFormat, 0, sizeof(AudioStreamBasicDescription));memset(&_microphoneDefaultStreamFormat, 0, sizeof(AudioStreamBasicDescription));}AudioDeviceMac::~AudioDeviceMac(){WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,"%s destroyed", __FUNCTION__);if (!_isShutDown){Terminate();}if (_captureWorkerThread){delete _captureWorkerThread;_captureWorkerThread = NULL;}if (_renderWorkerThread){delete _renderWorkerThread;_renderWorkerThread = NULL;}if (_paRenderBuffer){delete _paRenderBuffer;_paRenderBuffer = NULL;}if (_paCaptureBuffer){delete _paCaptureBuffer;_paCaptureBuffer = NULL;}if (_renderBufData){delete[] _renderBufData;_renderBufData = NULL;}if (_captureBufData){delete[] _captureBufData;_captureBufData = NULL;}kern_return_t kernErr = KERN_SUCCESS;kernErr = semaphore_destroy(mach_task_self(), _renderSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," semaphore_destroy() error: %d", kernErr);}kernErr = semaphore_destroy(mach_task_self(), _captureSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," semaphore_destroy() error: %d", kernErr);}delete &_stopEvent;delete &_stopEventRec;delete &_critSect;delete &_critSectCb;delete &_critSectNotify;delete &_critSectPlayFormatChange;if (_critSectFormatChange != NULL){delete _critSectFormatChange;}delete &_stopEventLoopbackRec;delete &_loopbackCritSect;delete &_zoomDeviceBufferCritSect;delete &_stopEventRecAgain;delete &_stopEventAgain;#ifdef MUTI_MICROPHONE_SUPPORTif(NULL != _pTmpRecordBuffer){delete [] _pTmpRecordBuffer;_pTmpRecordBuffer = NULL;}#endif}// ============================================================================// API// ============================================================================void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);_ptrAudioBuffer = audioBuffer;// inform the AudioBuffer about default settings for this implementation_ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);_ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);_ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);_ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);}WebRtc_Word32 AudioDeviceMac::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);audioLayer = AudioDeviceModule::kPlatformDefaultAudio;return 0;}#ifdef MUTI_MICROPHONE_SUPPORTWebRtc_Word32 AudioDeviceMac::Init(bool master)#elseWebRtc_Word32 AudioDeviceMac::Init()#endif{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (_initialized){return 0;}OSStatus err = noErr;_isShutDown = false;// PortAudio ring buffers require an elementCount which is a power of two.if (_renderBufData == NULL){UInt32 powerOfTwo = 1;while (powerOfTwo < PLAY_BUF_SIZE_IN_SAMPLES){powerOfTwo <<= 1;}_renderBufSizeSamples = powerOfTwo;_renderBufData = new SInt16[_renderBufSizeSamples];}if (_paRenderBuffer == NULL){_paRenderBuffer = new PaUtilRingBuffer;ring_buffer_size_t bufSize = -1;bufSize = PaUtil_InitializeRingBuffer(_paRenderBuffer, sizeof(SInt16),_renderBufSizeSamples,_renderBufData);if (bufSize == -1){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, " PaUtil_InitializeRingBuffer() error");return -1;}}if (_captureBufData == NULL){UInt32 powerOfTwo = 1;while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES){powerOfTwo <<= 1;}_captureBufSizeSamples = powerOfTwo;_captureBufData = new Float32[_captureBufSizeSamples];}if (_paCaptureBuffer == NULL){_paCaptureBuffer = new PaUtilRingBuffer;ring_buffer_size_t bufSize = -1;bufSize = PaUtil_InitializeRingBuffer(_paCaptureBuffer,sizeof(Float32),_captureBufSizeSamples,_captureBufData);if (bufSize == -1){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, " PaUtil_InitializeRingBuffer() error");return -1;}}if (_renderWorkerThread == NULL){_renderWorkerThread= ThreadWrapper::CreateThread(RunRender, this, kRealtimePriority,"RenderWorkerThread");if (_renderWorkerThread == NULL){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, " Render CreateThread() error");return -1;}}if (_captureWorkerThread == NULL){_captureWorkerThread= ThreadWrapper::CreateThread(RunCapture, this, kRealtimePriority,"CaptureWorkerThread");if (_captureWorkerThread == NULL){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, " Capture CreateThread() error");return -1;}}kern_return_t kernErr = KERN_SUCCESS;kernErr = semaphore_create(mach_task_self(), &_renderSemaphore,SYNC_POLICY_FIFO, 0);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id," semaphore_create() error: %d", kernErr);return -1;}kernErr = semaphore_create(mach_task_self(), &_captureSemaphore,SYNC_POLICY_FIFO, 0);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id," semaphore_create() error: %d", kernErr);return -1;}// Setting RunLoop to NULL here instructs HAL to manage its own thread for// notifications. This was the default behaviour on OS X 10.5 and earlier, but now// must be explicitly specified. HAL would otherwise try to use the main thread to// issue notifications.AudioObjectPropertyAddress propertyAddress = {kAudioHardwarePropertyRunLoop,kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };CFRunLoopRef runLoop = NULL;UInt32 size = sizeof(CFRunLoopRef);WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(kAudioObjectSystemObject,&propertyAddress, 0, NULL, size, &runLoop));// Listen for any device changes.propertyAddress.mSelector = kAudioHardwarePropertyDevices;WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener(kAudioObjectSystemObject,&propertyAddress, &objectListenerProc, this));// Determine if this is a MacBook Pro_macBookPro = false;_macBookProPanRight = false;char buf[128];size_t length = sizeof(buf);memset(buf, 0, length);int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0);if (intErr != 0){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Error in sysctlbyname(): %d", err);} else{WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," Hardware model: %s", buf);if (strncmp(buf, "MacBookPro8", 11) == 0)// if (strncmp(buf, "MacBookPro", 10) == 0){// _macBookPro = true;}}#ifdef MUTI_MICROPHONE_SUPPORTif (master){_pdeviceNotifier = new CAudioDevicesNotifier;CheckAndReplaceZoomDevice();}else{_pdeviceNotifier = NULL;}#elseCheckAndReplaceZoomDevice();#endif_playWarning = 0;_playError = 0;_recWarning = 0;_recError = 0;_loopbackrecError = 0;_initialized = true;m_StopDeviceQueue = dispatch_queue_create("StopDeviceQueue", NULL);;return 0;}WebRtc_Word32 AudioDeviceMac::Terminate(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);if (!_initialized){return 0;}int retVal = 0;if (_recording){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Recording must be stopped");StopRecording();retVal = -1;}if (_playing){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Playback must be stopped");StopPlayout();retVal = -1;}#ifdef DEVICE_THREAD_EXCEPTIONRunMicrophoneInfo.Stopped = true;RunSpeakerInfo.Stopped = true;#endif_critSect.Enter();_mixerManager.Close();OSStatus err = noErr;AudioObjectPropertyAddress propertyAddress = {kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(kAudioObjectSystemObject,&propertyAddress, &objectListenerProc, this));err = AudioHardwareUnload();if (err != noErr){logCAMsg(kTraceError, kTraceAudioDevice, _id,"Error in AudioHardwareUnload()", (const char*) &err);retVal = -1;}_critSect.Leave();#ifdef MUTI_MICROPHONE_SUPPORTif (_pdeviceNotifier){delete _pdeviceNotifier;_pdeviceNotifier = NULL;}#endif_isShutDown = true;_initialized = false;_outputDeviceIsSpecified = false;_inputDeviceIsSpecified = false;if (m_StopDeviceQueue){dispatch_release(m_StopDeviceQueue);}return retVal;}bool AudioDeviceMac::Initialized() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return (_initialized);}WebRtc_Word32 AudioDeviceMac::SpeakerIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool wasInitialized = _mixerManager.SpeakerIsInitialized();// Make an attempt to open up the// output mixer corresponding to the currently selected output device.//if (!wasInitialized && InitSpeaker() == -1){available = false;return 0;}// Given that InitSpeaker was successful, we know that a valid speaker exists//available = true;// Close the initialized output mixer//if (!wasInitialized){_mixerManager.CloseSpeaker();}return 0;}WebRtc_Word32 AudioDeviceMac::InitSpeaker(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (_playing){return -1;}if (InitDevice(_outputDeviceIndex, _outputDeviceID, false) == -1){return -1;}if (_mixerManager.OpenSpeaker(_outputDeviceID) == -1){return -1;}#ifdef MUTI_MICROPHONE_SUPPORTif (_pdeviceNotifier){_pdeviceNotifier->SetUsedDeviceID(IAudioDeviceChangeNotify::kRender,_outputDeviceID);}#else_deviceNotifier.SetUsedDeviceID(IAudioDeviceChangeNotify::kRender,_outputDeviceID);#endifWEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," InitSpeaker() _inputDeviceID:%d",_outputDeviceID);_bBuiltinSpk = false;_bHDMISpk = false;_bBlueSpk = false;UInt32 transportType;AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyTransportType,kAudioDevicePropertyScopeOutput, 0 };UInt32 size = sizeof(UInt32);OSStatus err = AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &transportType);if (err == noErr) {if (transportType == 'bltn') {_bBuiltinSpk = true;}if (transportType == 'hdmi') {_bHDMISpk = true;}if (transportType == 'blue'){_bBlueSpk = true;}}return 0;}bool AudioDeviceMac::BuiltInSpk(){if (SpeakerIsInitialized()){return _bBuiltinSpk;}return false;}bool AudioDeviceMac::HDMISpk(){if (SpeakerIsInitialized()){return _bHDMISpk;}return false;}bool AudioDeviceMac::BlueSpk(){if (SpeakerIsInitialized()){return _bBlueSpk;}return false;}WebRtc_Word32 AudioDeviceMac::SetOutputTargetLevelDB(int32_t optVolDB,int targetLevelDB){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s targetLevelDB = %d, optVolDB = %d", __FUNCTION__, targetLevelDB, optVolDB);_optVolDB = optVolDB;_outputTargetLevelDB = targetLevelDB;return 0;}WebRtc_Word32 AudioDeviceMac::EnableSpeakerVolumeCheck(bool bEnable){_enableSpkVolumeCheck = bEnable;return 0;}WebRtc_Word32 AudioDeviceMac::SpeakerVolumeWithDB(WebRtc_Word32& speakerVolumeDB){return _mixerManager.SpeakerVolumeWithDB(speakerVolumeDB);}WebRtc_Word32 AudioDeviceMac::MicrophoneIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool wasInitialized = _mixerManager.MicrophoneIsInitialized();// Make an attempt to open up the// input mixer corresponding to the currently selected output device.//if (!wasInitialized && InitMicrophone() == -1){available = false;return 0;}// Given that InitMicrophone was successful, we know that a valid microphone exists//available = true;// Close the initialized input mixer//if (!wasInitialized){_mixerManager.CloseMicrophone();}return 0;}WebRtc_Word32 AudioDeviceMac::InitMicrophone(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (_recording){return -1;}if (InitDevice(_inputDeviceIndex, _inputDeviceID, true) == -1){return -1;}if (_mixerManager.OpenMicrophone(_inputDeviceID) == -1){return -1;}#ifdef MUTI_MICROPHONE_SUPPORTif (_pdeviceNotifier){_pdeviceNotifier->SetUsedDeviceID(IAudioDeviceChangeNotify::kCapture,_inputDeviceID);}#else_deviceNotifier.SetUsedDeviceID(IAudioDeviceChangeNotify::kCapture,_inputDeviceID);#endifWEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," InitMicrophone() _inputDeviceID:%d",_inputDeviceID);_bBuiltinMic = false;_bBlueMic = false;UInt32 transportType;AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyTransportType,kAudioDevicePropertyScopeInput, 0 };UInt32 size = sizeof(UInt32);OSStatus err = AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &transportType);if (err == noErr) {if (transportType == 'bltn') {_bBuiltinMic = true;}if (transportType == 'blue'){_bBlueMic = true;}}return 0;}bool AudioDeviceMac::BuiltInMic(){if (MicrophoneIsInitialized()) {return _bBuiltinMic;}return false;}int32_t AudioDeviceMac::get_device_property(WebRtc_Word32 type,WebRtc_Word32 index,WebRtc_Word32 prop,void * p_data, int32_t size_of_data){switch(prop){case 3:{if (type == 0){*((bool *)p_data) = _bBlueSpk;}else{*((bool *)p_data) = _bBlueMic;}}break;default:break;}return 0;}bool AudioDeviceMac::SpeakerIsInitialized() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return (_mixerManager.SpeakerIsInitialized());}bool AudioDeviceMac::MicrophoneIsInitialized() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return (_mixerManager.MicrophoneIsInitialized());}WebRtc_Word32 AudioDeviceMac::SpeakerVolumeIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);available = false;bool wasInitialized = _mixerManager.SpeakerIsInitialized();// Make an attempt to open up the// output mixer corresponding to the currently selected output device.//if (!wasInitialized && InitSpeaker() == -1){// If we end up here it means that the selected speaker has no volume// control.available = false;return 0;}_mixerManager.SpeakerVolumeIsAvailable(available);// Close the initialized output mixer//if (!wasInitialized){_mixerManager.CloseSpeaker();}return 0;}WebRtc_Word32 AudioDeviceMac::SetSpeakerVolume(WebRtc_UWord32 volume,bool bSys){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetSpeakerVolume(volume=%u)", volume);return (_mixerManager.SetSpeakerVolume(volume,bSys));}WebRtc_Word32 AudioDeviceMac::SpeakerVolume(WebRtc_UWord32& volume,bool bSys) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord32 level(0);if (_mixerManager.SpeakerVolume(level,bSys) == -1){return -1;}volume = level;return 0;}WebRtc_Word32 AudioDeviceMac::SetWaveOutVolume(WebRtc_UWord16 volumeLeft,WebRtc_UWord16 volumeRight){WEBRTC_TRACE(kTraceModuleCall,kTraceAudioDevice,_id,"AudioDeviceMac::SetWaveOutVolume(volumeLeft=%u, volumeRight=%u)",volumeLeft, volumeRight);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," API call not supported on this platform");return -1;}WebRtc_Word32AudioDeviceMac::WaveOutVolume(WebRtc_UWord16& /*volumeLeft*/,WebRtc_UWord16& /*volumeRight*/) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," API call not supported on this platform");return -1;}WebRtc_Word32 AudioDeviceMac::MaxSpeakerVolume(WebRtc_UWord32& maxVolume,bool bSys) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord32 maxVol(0);if (_mixerManager.MaxSpeakerVolume(maxVol,bSys) == -1){return -1;}maxVolume = maxVol;return 0;}WebRtc_Word32 AudioDeviceMac::MinSpeakerVolume(WebRtc_UWord32& minVolume) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord32 minVol(0);if (_mixerManager.MinSpeakerVolume(minVol) == -1){return -1;}minVolume = minVol;return 0;}WebRtc_Word32AudioDeviceMac::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord16 delta(0);if (_mixerManager.SpeakerVolumeStepSize(delta) == -1){return -1;}stepSize = delta;return 0;}WebRtc_Word32 AudioDeviceMac::SpeakerMuteIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool isAvailable(false);bool wasInitialized = _mixerManager.SpeakerIsInitialized();// Make an attempt to open up the// output mixer corresponding to the currently selected output device.//if (!wasInitialized && InitSpeaker() == -1){// If we end up here it means that the selected speaker has no volume// control, hence it is safe to state that there is no mute control// already at this stage.available = false;return 0;}// Check if the selected speaker has a mute control//_mixerManager.SpeakerMuteIsAvailable(isAvailable);available = isAvailable;// Close the initialized output mixer//if (!wasInitialized){_mixerManager.CloseSpeaker();}return 0;}WebRtc_Word32 AudioDeviceMac::SetSpeakerMute(bool enable,bool bDefault){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetSpeakerMute(enable=%u)", enable);return (_mixerManager.SetSpeakerMute(enable));}WebRtc_Word32 AudioDeviceMac::SpeakerMute(bool& enabled,bool bDefault) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool muted(0);if (_mixerManager.SpeakerMute(muted) == -1){return -1;}enabled = muted;return 0;}WebRtc_Word32 AudioDeviceMac::MicrophoneMuteIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool isAvailable(false);bool wasInitialized = _mixerManager.MicrophoneIsInitialized();// Make an attempt to open up the// input mixer corresponding to the currently selected input device.//if (!wasInitialized && InitMicrophone() == -1){// If we end up here it means that the selected microphone has no volume// control, hence it is safe to state that there is no boost control// already at this stage.available = false;return 0;}// Check if the selected microphone has a mute control//_mixerManager.MicrophoneMuteIsAvailable(isAvailable);available = isAvailable;// Close the initialized input mixer//if (!wasInitialized){_mixerManager.CloseMicrophone();}return 0;}WebRtc_Word32 AudioDeviceMac::SetMicrophoneMute(bool enable){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceWindowsWave::SetMicrophoneMute(enable=%u)", enable);return (_mixerManager.SetMicrophoneMute(enable));}WebRtc_Word32 AudioDeviceMac::MicrophoneMute(bool& enabled) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool muted(0);if (_mixerManager.MicrophoneMute(muted) == -1){return -1;}enabled = muted;return 0;}WebRtc_Word32 AudioDeviceMac::MicrophoneBoostIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool isAvailable(false);bool wasInitialized = _mixerManager.MicrophoneIsInitialized();// Enumerate all avaliable microphone and make an attempt to open up the// input mixer corresponding to the currently selected input device.//if (!wasInitialized && InitMicrophone() == -1){// If we end up here it means that the selected microphone has no volume// control, hence it is safe to state that there is no boost control// already at this stage.available = false;return 0;}// Check if the selected microphone has a boost control//_mixerManager.MicrophoneBoostIsAvailable(isAvailable);available = isAvailable;// Close the initialized input mixer//if (!wasInitialized){_mixerManager.CloseMicrophone();}return 0;}WebRtc_Word32 AudioDeviceMac::SetMicrophoneBoost(bool enable){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetMicrophoneBoost(enable=%u)", enable);return (_mixerManager.SetMicrophoneBoost(enable));}WebRtc_Word32 AudioDeviceMac::MicrophoneBoost(bool& enabled){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool onOff(0);if (_mixerManager.MicrophoneBoost(onOff) == -1){return -1;}enabled = onOff;return 0;}WebRtc_Word32 AudioDeviceMac::StereoRecordingIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool isAvailable(false);bool wasInitialized = _mixerManager.MicrophoneIsInitialized();if (!wasInitialized && InitMicrophone() == -1){// Cannot open the specified deviceavailable = false;return 0;}// Check if the selected microphone can record stereo//_mixerManager.StereoRecordingIsAvailable(isAvailable);available = isAvailable;// Close the initialized input mixer//if (!wasInitialized){_mixerManager.CloseMicrophone();}return 0;}WebRtc_Word32 AudioDeviceMac::SetStereoRecording(bool enable){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetStereoRecording(enable=%u)", enable);if (enable)_recChannels = 2;else_recChannels = 1;return 0;}WebRtc_Word32 AudioDeviceMac::StereoRecording(bool& enabled) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);if (_recChannels == 2)enabled = true;elseenabled = false;return 0;}WebRtc_Word32 AudioDeviceMac::StereoPlayoutIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);bool isAvailable(false);bool wasInitialized = _mixerManager.SpeakerIsInitialized();if (!wasInitialized && InitSpeaker() == -1){// Cannot open the specified deviceavailable = false;return 0;}// Check if the selected microphone can record stereo//_mixerManager.StereoPlayoutIsAvailable(isAvailable);available = isAvailable;// Close the initialized input mixer//if (!wasInitialized){_mixerManager.CloseSpeaker();}return 0;}WebRtc_Word32 AudioDeviceMac::SetStereoPlayout(bool enable){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetStereoPlayout(enable=%u)", enable);if (enable)_playChannels = 2;else_playChannels = 1;return 0;}WebRtc_Word32 AudioDeviceMac::StereoPlayout(bool& enabled) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);if (_playChannels == 2)enabled = true;elseenabled = false;return 0;}WebRtc_Word32 AudioDeviceMac::SetAGC(bool enable){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetAGC(enable=%d)", enable);_AGC = enable;return 0;}bool AudioDeviceMac::AGC() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return _AGC;}WebRtc_Word32 AudioDeviceMac::MicrophoneVolumeIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);available = false;bool wasInitialized = _mixerManager.MicrophoneIsInitialized();// Make an attempt to open up the// input mixer corresponding to the currently selected output device.//if (!wasInitialized && InitMicrophone() == -1){// If we end up here it means that the selected microphone has no volume// control.available = false;return 0;}_mixerManager.MicrophoneVolumeIsAvailable(available);// Close the initialized input mixer//if (!wasInitialized){_mixerManager.CloseMicrophone();}return 0;}WebRtc_Word32 AudioDeviceMac::SetMicrophoneVolume(WebRtc_UWord32 volume){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetMicrophoneVolume(volume=%u)", volume);return (_mixerManager.SetMicrophoneVolume(volume));}WebRtc_Word32 AudioDeviceMac::MicrophoneVolume(WebRtc_UWord32& volume) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord32 level(0);if (_mixerManager.MicrophoneVolume(level) == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," failed to retrive current microphone level");return -1;}volume = level;return 0;}WebRtc_Word32AudioDeviceMac::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord32 maxVol(0);if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1){return -1;}maxVolume = maxVol;return 0;}WebRtc_Word32AudioDeviceMac::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord32 minVol(0);if (_mixerManager.MinMicrophoneVolume(minVol) == -1){return -1;}minVolume = minVol;return 0;}WebRtc_Word32AudioDeviceMac::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WebRtc_UWord16 delta(0);if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1){return -1;}stepSize = delta;return 0;}WebRtc_Word16 AudioDeviceMac::PlayoutDevices(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);AudioDeviceID playDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_Word32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput, playDevices,MaxNumberDevices,ZoomAudioDeviceNum);if (nDevices > 0){nDevices -= ZoomAudioDeviceNum;}return nDevices;}WebRtc_Word32 AudioDeviceMac::SetPlayoutDevice(WebRtc_UWord16 index){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetPlayoutDevice(index=%u)", index);if (_playIsInitialized){return -1;}AudioDeviceID playDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_Word32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,playDevices, MaxNumberDevices,ZoomAudioDeviceNum);if (nDevices > 0){nDevices -= ZoomAudioDeviceNum;}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," number of availiable waveform-audio output devices is %u",nDevices);if (index == (WebRtc_UWord16)-1){}else if (index > (nDevices - 1)){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," device index is out of range [0,%u]", (nDevices - 1));return -1;}_outputDeviceIndex = index;_outputDeviceIsSpecified = true;return 0;}WebRtc_Word32 AudioDeviceMac::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"WindowsDeviceType not supported");if (device == AudioDeviceModule::kDefaultCommunicationDevice){if(0 == SetPlayoutDevice(0)){return 0;}else{return -1;}}else if (device == AudioDeviceModule::kDefaultDevice){if(0 == SetPlayoutDevice((WebRtc_UWord16)-1)){return 0;}else{return -1;}}return 0;}WebRtc_Word32 AudioDeviceMac::GetPlayoutDevice(int &index){if (_outputDeviceIsSpecified){index = _outputDeviceIndex;return 0;}return -1;}WebRtc_Word32 AudioDeviceMac::PlayoutDeviceName(WebRtc_UWord16 index,WebRtc_Word8 name[kAdmMaxDeviceNameSize],WebRtc_Word8 guid[kAdmMaxGuidSize]){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::PlayoutDeviceName(index=%u)", index);if (index == (WebRtc_UWord16)-3){if (name != NULL){memcpy(name,_outputDevName,kAdmMaxDeviceNameSize);if (!_playing){memset(name,0,kAdmMaxDeviceNameSize);}}if (guid != NULL){memcpy(guid,_outputDevGuid,kAdmMaxGuidSize);if (!_playing){memset(guid,0,kAdmMaxGuidSize);}/// mac change id when usb device plug out and in. so here use name/*memset(guid, 0, kAdmMaxGuidSize);if (_playing){sprintf(guid, "%d", (int)_outputDeviceID);}*/}return 0;}if (index == (WebRtc_UWord16)-1){index = 0;}else if (index == (WebRtc_UWord16)-2){memset(name, 0, kAdmMaxDeviceNameSize);if (guid != NULL){memset(guid, 0, kAdmMaxGuidSize);}return GetDeviceName(kAudioDevicePropertyScopeOutput, -1, name, guid);}memset(name, 0, kAdmMaxDeviceNameSize);if (guid != NULL){memset(guid, 0, kAdmMaxGuidSize);}#if 0const WebRtc_UWord16 nDevices(PlayoutDevices());if ((index > (nDevices - 1)) || (name == NULL)){return -1;}CheckAndIncreaseZoomDevice(kAudioDevicePropertyScopeOutput,nDevices,index);return GetDeviceName(kAudioDevicePropertyScopeOutput, index, name, guid);#elsereturn GetReListDeviceName(kAudioDevicePropertyScopeOutput, index, name, guid);#endif}WebRtc_Word32 AudioDeviceMac::AllRelistDevice(const AudioObjectPropertyScope scope,WebRtc_Word8 name[][kAdmMaxDeviceNameSize],WebRtc_Word8 guid[][kAdmMaxGuidSize],AudioDeviceID scopedDeviceIds[],int& number){int outSize = number;AudioDeviceID deviceIds[MaxNumberDevices];char deviceNames[MaxNumberDevices][kAdmMaxDeviceNameSize];WebRtc_UWord8 ZoomAudioDeviceNum = 0;int numberDevices = GetNumberDevices(scope, deviceIds,MaxNumberDevices,ZoomAudioDeviceNum,false);WebRtc_Word32 index1 = 0,index2 = 0;// index1 is to read from system, and index2 is write to name.OSStatus err = noErr;UInt32 len = kAdmMaxDeviceNameSize;AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0 };for (int i = 0; i < numberDevices; i++){UInt32 len = kAdmMaxDeviceNameSize;AudioObjectGetPropertyData( deviceIds[i],&propertyAddress, 0, NULL, &len, deviceNames[i]);}for (index1 = 0,index2 = 0; index1 < numberDevices && index2 < outSize;index1++){char devName[128];char devUniqueID[128];memset(devName, 0, sizeof(devName));memset(devUniqueID, 0, sizeof(devUniqueID));AudioDeviceID usedID = deviceIds[index1];UInt32 transportType = 0;AudioObjectPropertyAddress propertyAddressTransportType = { kAudioDevicePropertyTransportType,scope, 0 };UInt32 size = sizeof(UInt32);OSStatus err = AudioObjectGetPropertyData(usedID,&propertyAddressTransportType, 0, NULL, &size, &transportType);strncpy(devName,deviceNames[index1],sizeof(devName));uint16_t sameDeviceNameCount = 0;for (int i = 0; i < index1; i++){if (strcmp(devName,deviceNames[i]) == 0){sameDeviceNameCount++;}}if (sameDeviceNameCount != 0){char sourceName[128];memset(sourceName,0,128);sprintf(sourceName,"#%d",sameDeviceNameCount);if ((strlen(devName) + strlen(sourceName)) < kAdmMaxDeviceNameSize){strcat(devName,sourceName);}}strncpy(devUniqueID,devName,sizeof(devUniqueID));propertyAddress.mSelector = kAudioDevicePropertyDataSource;Boolean hasProperty = AudioObjectHasProperty(usedID,&propertyAddress);if(hasProperty){UInt32 dataSource = 0;UInt32 size = sizeof(dataSource);if(noErr == AudioObjectGetPropertyData(usedID,&propertyAddress, 0, NULL, &size, &dataSource)){AudioValueTranslation trans;CFStringRef str = NULL;Boolean ok;trans.mInputData = &dataSource;trans.mInputDataSize = sizeof(UInt32);trans.mOutputData = &str;trans.mOutputDataSize = sizeof(CFStringRef);propertyAddress.mSelector = kAudioDevicePropertyDataSourceNameForIDCFString;size = sizeof(AudioValueTranslation);if(AudioObjectGetPropertyData(usedID,&propertyAddress,0,NULL,&size,&trans)==noErr){char sourceName[128];if(str != NULL && CFStringGetCString(str, sourceName, 128, kCFStringEncodingUTF8)){if ((strlen(devName) + strlen(sourceName) + 3) < kAdmMaxDeviceNameSize){strcat(devName, " (");strcat(devName, sourceName);strcat(devName, ")");}}}if(str)CFRelease(str);}}devName[kAdmMaxDeviceNameSize - 1] = '\0';#ifdef BUILD_FOR_MIMOif ((strstr(devName, ZoomAudioDeviceName2) == 0) && (strstr(devName,BlackmagicAudioName) == 0)/* && (strstr(devName,MagewellAudioName) == 0)*/&& (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) != 0))#elseif (strstr(devName, ZoomAudioDeviceName2) == 0 && (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) != 0))#endif{strncpy(name[index2],devName,kAdmMaxDeviceNameSize);if (transportType == 'bltn'){strncpy(guid[index2],devUniqueID,kAdmMaxDeviceNameSize);}else{strncpy(guid[index2],devName,kAdmMaxDeviceNameSize);}scopedDeviceIds[index2] = usedID;index2++;}}number = index2;return 0;}WebRtc_Word32 AudioDeviceMac::GetReListDeviceName(const AudioObjectPropertyScope scope,WebRtc_UWord16 index, char* name, char* deviceID){char deviceIds1[MaxNumberDevices][kAdmMaxDeviceNameSize];char deviceNames1[MaxNumberDevices][kAdmMaxDeviceNameSize];AudioDeviceID scopedDeviceIds[MaxNumberDevices];int deviceNum = MaxNumberDevices;AllRelistDevice(scope, deviceNames1, deviceIds1, scopedDeviceIds, deviceNum);if(index < deviceNum){strncpy((char*)(name),deviceNames1[index],kAdmMaxDeviceNameSize);strncpy((char*)(deviceID),deviceIds1[index],kAdmMaxDeviceNameSize);return 0;}else{return -1;}}WebRtc_Word32 AudioDeviceMac::AllDeviceName(const AudioObjectPropertyScope scope,char* name,int number){SSB_MC_DATA_BLOCK_AUDIO_DEVICE_DES *p_des = (SSB_MC_DATA_BLOCK_AUDIO_DEVICE_DES *)name;if (!p_des){return -1;}AudioDeviceID deviceIds[MaxNumberDevices];char deviceNames[MaxNumberDevices][kAdmMaxDeviceNameSize];WebRtc_UWord8 ZoomAudioDeviceNum = 0;int numberDevices = GetNumberDevices(scope, deviceIds,MaxNumberDevices,ZoomAudioDeviceNum,false);WebRtc_Word32 index = 0,index2 = 0;// index is to read from system, and index2 is write to name.OSStatus err = noErr;UInt32 len = kAdmMaxDeviceNameSize;AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0 };for (int i = 0; i < numberDevices; i++){UInt32 len = kAdmMaxDeviceNameSize;AudioObjectGetPropertyData( deviceIds[i],&propertyAddress, 0, NULL, &len, deviceNames[i]);}for (index = 0,index2 = 0; index < numberDevices && index2 < number;index++){char devName[128];char devUniqueID[128];memset(devName, 0, sizeof(devName));memset(devUniqueID, 0, sizeof(devUniqueID));AudioDeviceID usedID = deviceIds[index];UInt32 transportType = 0;AudioObjectPropertyAddress propertyAddressTransportType = { kAudioDevicePropertyTransportType,scope, 0 };UInt32 size = sizeof(UInt32);OSStatus err = AudioObjectGetPropertyData(usedID,&propertyAddressTransportType, 0, NULL, &size, &transportType);strncpy(devName,deviceNames[index],sizeof(devName));uint16_t sameDeviceNameCount = 0;for (int i = 0; i < index; i++){if (strcmp(devName,deviceNames[i]) == 0){sameDeviceNameCount++;}}if (sameDeviceNameCount != 0){char sourceName[128];memset(sourceName,0,128);sprintf(sourceName,"#%d",sameDeviceNameCount);if ((strlen(devName) + strlen(sourceName)) < kAdmMaxDeviceNameSize){strcat(devName,sourceName);}}strncpy(devUniqueID,devName,sizeof(devUniqueID));propertyAddress.mSelector = kAudioDevicePropertyDataSource;Boolean hasProperty = AudioObjectHasProperty(usedID,&propertyAddress);if(hasProperty){UInt32 dataSource = 0;UInt32 size = sizeof(dataSource);if(noErr == AudioObjectGetPropertyData(usedID,&propertyAddress, 0, NULL, &size, &dataSource)){AudioValueTranslation trans;CFStringRef str = NULL;Boolean ok;trans.mInputData = &dataSource;trans.mInputDataSize = sizeof(UInt32);trans.mOutputData = &str;trans.mOutputDataSize = sizeof(CFStringRef);propertyAddress.mSelector = kAudioDevicePropertyDataSourceNameForIDCFString;size = sizeof(AudioValueTranslation);if(AudioObjectGetPropertyData(usedID,&propertyAddress,0,NULL,&size,&trans)==noErr){char sourceName[128];if(str != NULL && CFStringGetCString(str, sourceName, 128, kCFStringEncodingUTF8)){if ((strlen(devName) + strlen(sourceName) + 3) < kAdmMaxDeviceNameSize){strcat(devName, " (");strcat(devName, sourceName);strcat(devName, ")");}}}if(str)CFRelease(str);}}devName[kAdmMaxDeviceNameSize - 1] = '\0';/// mac change id when usb device plug out and in. so here use name/*if( deviceID ){sprintf(deviceID, "%d", (int)usedID);}*/#ifdef BUILD_FOR_MIMOif ((strstr(devName, ZoomAudioDeviceName2) == 0) && (strstr(devName,BlackmagicAudioName) == 0)/* && (strstr(devName,MagewellAudioName) == 0)*/&& (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) != 0))#elseif (strstr(devName, ZoomAudioDeviceName2) == 0 && (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) != 0))#endif{strncpy((char *)p_des[index2].p_name,devName,p_des[index2].len_of_name);if (transportType == 'bltn'){strncpy((char *)p_des[index2].p_unique_id,devUniqueID,p_des[index2].len_of_unique_id);}else{strncpy((char *)p_des[index2].p_unique_id,devName,p_des[index2].len_of_unique_id);}index2++;}}return 0;}WebRtc_Word32 AudioDeviceMac::AllPlayoutDeviceName(char *strNameUTF8,int number){return AllDeviceName(kAudioDevicePropertyScopeOutput,strNameUTF8,number);}WebRtc_Word32 AudioDeviceMac::RecordingDeviceName(WebRtc_UWord16 index,WebRtc_Word8 name[kAdmMaxDeviceNameSize],WebRtc_Word8 guid[kAdmMaxGuidSize]){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::RecordingDeviceName(index=%u)", index);if (index == (WebRtc_UWord16)-3){if (name != NULL){memcpy(name,_inputDevName,kAdmMaxDeviceNameSize);if (!_recording){memset(name, 0, kAdmMaxDeviceNameSize);}}if (guid != NULL){memcpy(guid,_inputDevGuid,kAdmMaxGuidSize);if (!_recording){memset(guid, 0, kAdmMaxGuidSize);}/// mac change id when usb device plug out and in. so here use name/*memset(guid, 0, kAdmMaxGuidSize);if (_recording){sprintf(guid, "%d", (int)_inputDeviceID);}*/}return 0;}if (index == (WebRtc_UWord16)-1){index = 0;}else if (index == (WebRtc_UWord16)-2){memset(name, 0, kAdmMaxDeviceNameSize);if (guid != NULL){memset(guid, 0, kAdmMaxGuidSize);}return GetDeviceName(kAudioDevicePropertyScopeInput, -1, name, guid);}memset(name, 0, kAdmMaxDeviceNameSize);if (guid != NULL){memset(guid, 0, kAdmMaxGuidSize);}#if 0const WebRtc_UWord16 nDevices(RecordingDevices());if ((index > (nDevices - 1)) || (name == NULL)){return -1;}CheckAndIncreaseZoomDevice(kAudioDevicePropertyScopeInput, nDevices,index);return GetDeviceName(kAudioDevicePropertyScopeInput, index, name, guid);#elsereturn GetReListDeviceName(kAudioDevicePropertyScopeInput, index, name, guid);#endif}WebRtc_Word32 AudioDeviceMac::AllRecordingDeviceName(char *strNameUTF8,int number){return AllDeviceName(kAudioDevicePropertyScopeInput,strNameUTF8,number);}WebRtc_Word32 AudioDeviceMac::AllRecordingDevice(WebRtc_Word8 name[][kAdmMaxDeviceNameSize],WebRtc_Word8 guid[][kAdmMaxGuidSize],int& number){AudioDeviceID scopedDeviceIds[MaxNumberDevices];return AllRelistDevice(kAudioDevicePropertyScopeInput, name, guid, scopedDeviceIds, number);}WebRtc_Word16 AudioDeviceMac::RecordingDevices(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);AudioDeviceID recDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_Word32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput, recDevices,MaxNumberDevices,ZoomAudioDeviceNum);if(nDevices > 0){nDevices -= ZoomAudioDeviceNum;}return nDevices;}WebRtc_Word32 AudioDeviceMac::SetRecordingDevice(WebRtc_UWord16 index,bool use_exclusive_mode){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetRecordingDevice(index=%u)", index);if (_recIsInitialized){return -1;}AudioDeviceID recDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_Word32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,recDevices, MaxNumberDevices,ZoomAudioDeviceNum);if(nDevices > 0){nDevices -= ZoomAudioDeviceNum;}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," number of availiable waveform-audio input devices is %u",nDevices);if (index == (WebRtc_UWord16)-1){}else if(index == (WebRtc_UWord16)-4) /// for ultrasound device{if (nDevices > 0){nDevices = CheckAndRemoveZoomDevice(kAudioDevicePropertyScopeInput, recDevices, nDevices + ZoomAudioDeviceNum);}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"AudioDeviceMac::SetRecordingDevice index -4 removed zoom devices: %u",nDevices);bool bBuiltinMic = false;for(int i = 0; i < nDevices; i++){UInt32 transportType ;AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyTransportType,kAudioDevicePropertyScopeInput, 0 };UInt32 size = sizeof(UInt32);OSStatus err = AudioObjectGetPropertyData(recDevices[i],&propertyAddress, 0, NULL, &size, &transportType);if (err == noErr){if (transportType == 'bltn'){/// here judge if plug headphone_inputDeviceIndex = i;_inputDeviceIsSpecified = true;bBuiltinMic = true;_bUseExclusiveMode = use_exclusive_mode;if (_bUseExclusiveMode){AdjustMicrophoneSampleRateBaseDeviceMode();}break;}}}if (bBuiltinMic){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"AudioDeviceMac::SetRecordingDevice index -4 have built in mic index is : %u",_inputDeviceIndex);return 0;}else{WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"AudioDeviceMac::SetRecordingDevice index -4 have not built in mic index is : %u",_inputDeviceIndex);return -1;}}else if (index > (nDevices - 1)){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," device index is out of range [0,%u]", (nDevices - 1));return -1;}_inputDeviceIndex = index;if (!_usingDeviceType){_inputDeviceIndexUI = index;}_inputDeviceIsSpecified = true;_bUseExclusiveMode = use_exclusive_mode;if (_bUseExclusiveMode){AdjustMicrophoneSampleRateBaseDeviceMode();}return 0;}WebRtc_Word32AudioDeviceMac::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device,bool use_exclusive_mode){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"WindowsDeviceType not supported");_usingDeviceType = true;if (device == AudioDeviceModule::kDefaultCommunicationDevice){if(0 == SetRecordingDevice(0)){_inputDeviceIndexUI = -1;_usingDeviceType = false;return 0;}else{_usingDeviceType = false;return -1;}}else if (device == AudioDeviceModule::kDefaultDevice){if(0 == SetRecordingDevice((WebRtc_UWord16)-1)){_inputDeviceIndexUI = -2;_usingDeviceType = false;return 0;}else{_usingDeviceType = false;return -1;}}_usingDeviceType = false;}WebRtc_Word32 AudioDeviceMac::GetRecordingDevice(int &index){if (_inputDeviceIsSpecified){index = _inputDeviceIndexUI;return 0;}return -1;}WebRtc_Word32 AudioDeviceMac::PlayoutIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);available = false;// Try to initialize the playout sideWebRtc_Word32 res = InitPlayout();// Cancel effect of initializationStopPlayout();if (res != -1){available = true;}return 0;}WebRtc_Word32 AudioDeviceMac::RecordingIsAvailable(bool& available){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);available = false;// Try to initialize the recording sideWebRtc_Word32 res = InitRecording();// Cancel effect of initializationStopRecording();if (res != -1){available = true;}return 0;}WebRtc_Word32 AudioDeviceMac::InitPlayout(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (_playing){return -1;}if (!_outputDeviceIsSpecified){return -1;}if (_playIsInitialized){return 0;}// Initialize the speaker (devices might have been added or removed)if (InitSpeaker() == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," InitSpeaker() failed");}if (!MicrophoneIsInitialized()){// Make this call to check if we are using// one or two devices (_twoDevices)bool available = false;if (MicrophoneIsAvailable(available) == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," MicrophoneIsAvailable() failed");}}PaUtil_FlushRingBuffer(_paRenderBuffer);OSStatus err = noErr;UInt32 size = 0;_renderDelayOffsetSamples = 0;_renderDelayUs = 0;_renderLatencyUs = 0;_renderDeviceIsAlive = 1;_doStop = false;// The internal microphone of a MacBook Pro is located under the left speaker// grille. When the internal speakers are in use, we want to fully stereo// pan to the right.AudioObjectPropertyAddresspropertyAddress = { kAudioDevicePropertyDataSource,kAudioDevicePropertyScopeOutput, 0 };if (_macBookPro){_macBookProPanRight = false;Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,&propertyAddress);if (hasProperty){UInt32 dataSource = 0;size = sizeof(dataSource);WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &dataSource));if (dataSource == 'ispk'){// _macBookProPanRight = true;WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,_id,"MacBook Pro using internal speakers; stereo"" panning right");} else{WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,_id, "MacBook Pro not using internal speakers");}// Add a listener to determine if the status changes.WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,&propertyAddress, &objectListenerProc, this));}}// Get current stream descriptionpropertyAddress.mSelector = kAudioDevicePropertyStreamFormat;memset(&_outStreamFormat, 0, sizeof(_outStreamFormat));size = sizeof(_outStreamFormat);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &_outStreamFormat));if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM){logCAMsg(kTraceError, kTraceAudioDevice, _id,"Unacceptable output stream format -> mFormatID",(const char *) &_outStreamFormat.mFormatID);return -1;}if (_outStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"Too many channels on device -> mChannelsPerFrame = %d",_outStreamFormat.mChannelsPerFrame);return -1;}if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"Non-interleaved audio data is not supported.","AudioHardware streams should not have this format.");return -1;}WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Ouput stream format:");WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mSampleRate = %f, mChannelsPerFrame = %u",_outStreamFormat.mSampleRate,_outStreamFormat.mChannelsPerFrame);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mBytesPerPacket = %u, mFramesPerPacket = %u",_outStreamFormat.mBytesPerPacket,_outStreamFormat.mFramesPerPacket);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mBytesPerFrame = %u, mBitsPerChannel = %u",_outStreamFormat.mBytesPerFrame,_outStreamFormat.mBitsPerChannel);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mFormatFlags = %u, mChannelsPerFrame = %u",_outStreamFormat.mFormatFlags,_outStreamFormat.mChannelsPerFrame);logCAMsg(kTraceWarning, kTraceAudioDevice, _id, "mFormatID",(const char *) &_outStreamFormat.mFormatID);_critSectCb.Enter();// Our preferred format to work with_outDesiredFormat.mSampleRate = N_PLAY_SAMPLES_PER_SEC;if (_outStreamFormat.mChannelsPerFrame >= 2 && (_playChannels == 2)){_outDesiredFormat.mChannelsPerFrame = 2;} else{// Disable stereo playout when we only have one channel on the device._outDesiredFormat.mChannelsPerFrame = 1;_playChannels = 1;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Stereo playout unavailable on this device");}if (_ptrAudioBuffer){// Update audio buffer with the selected parameters_ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);_ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);}_renderDelayOffsetSamples = _renderBufSizeSamples - N_BUFFERS_OUT* ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;_outDesiredFormat.mBytesPerPacket = _outDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_outDesiredFormat.mFramesPerPacket = 1; // In uncompressed audio,// a packet is one frame._outDesiredFormat.mBytesPerFrame = _outDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_outDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;_outDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger| kLinearPCMFormatFlagIsPacked;#ifdef WEBRTC_BIG_ENDIAN_outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;#endif_outDesiredFormat.mFormatID = kAudioFormatLinearPCM;WEBRTC_CA_LOG_ERR(AudioConverterNew(&_outDesiredFormat, &_outStreamFormat,&_renderConverter));_critSectCb.Leave();// First try to set buffer size to desired value (_playBufDelayFixed)UInt32 bufByteCount = (UInt32)((_outStreamFormat.mSampleRate / 1000.0)* _playBufDelayFixed * _outStreamFormat.mChannelsPerFrame* sizeof(Float32));if (_outStreamFormat.mFramesPerPacket != 0){if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0){bufByteCount = ((UInt32)(bufByteCount/ _outStreamFormat.mFramesPerPacket) + 1)* _outStreamFormat.mFramesPerPacket;}}// Ensure the buffer size is within the acceptable range provided by the device.propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;AudioValueRange range;size = sizeof(range);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &range));if (range.mMinimum > bufByteCount){bufByteCount = range.mMinimum;} else if (range.mMaximum < bufByteCount){bufByteCount = range.mMaximum;}propertyAddress.mSelector = kAudioDevicePropertyBufferSize;size = sizeof(bufByteCount);WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, size, &bufByteCount));// Get render device latencypropertyAddress.mSelector = kAudioDevicePropertyLatency;UInt32 latency = 0;size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_renderLatencyUs = (WebRtc_UWord32) ((1.0e6 * latency)/ _outStreamFormat.mSampleRate);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"play report delay kAudioDevicePropertyLatency = %d",_renderLatencyUs);#endif// Get render stream latencypropertyAddress.mSelector = kAudioDevicePropertyStreams;AudioStreamID stream = 0;size = sizeof(AudioStreamID);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &stream));propertyAddress.mSelector = kAudioStreamPropertyLatency;size = sizeof(UInt32);latency = 0;WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_renderLatencyUs += (WebRtc_UWord32) ((1.0e6 * latency)/ _outStreamFormat.mSampleRate);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"play report delay kAudioDevicePropertyLatency = %d",(WebRtc_UWord32) ((1.0e6 * latency)/ _outStreamFormat.mSampleRate));#endifif (_renderLatencyUs/1000 > 50){_renderLatencyUs = 0;}// Listen for format changespropertyAddress.mSelector = kAudioDevicePropertyStreamFormat;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,&propertyAddress, &objectListenerProc, this));// Listen for processor overloadspropertyAddress.mSelector = kAudioDeviceProcessorOverload;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,&propertyAddress, &objectListenerProc, this));// listen for volume changes// Some devices (but not many) support a master channelpropertyAddress.mSelector = kAudioDevicePropertyVolumeScalar;if(AudioObjectHasProperty(_outputDeviceID, &propertyAddress)){// WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID, &propertyAddress, &objectListenerProc, this));}else{// Typically the L and R channels are 1 and 2 respectively, but could be different/*propertyAddress.mElement = 1;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID, &propertyAddress, &objectListenerProc, this));propertyAddress.mElement = 2;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID, &propertyAddress, &objectListenerProc, this));*/}if (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_outputDeviceID,deviceIOProc, this, &_deviceIOProcID));}else{WEBRTC_CA_RETURN_ON_ERR(AudioDeviceAddIOProc(_outputDeviceID, deviceIOProc, this));}// Mark playout side as initialized_playIsInitialized = true;WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," initial playout status: _renderDelayOffsetSamples=%d,"" _renderDelayUs=%d, _renderLatencyUs=%d",_renderDelayOffsetSamples, _renderDelayUs, _renderLatencyUs);return 0;}WebRtc_Word32 AudioDeviceMac::InitRecording(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (_recording){return -1;}if (!_inputDeviceIsSpecified){return -1;}if (_recIsInitialized){return 0;}// Initialize the microphone (devices might have been added or removed)if (InitMicrophone() == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," InitMicrophone() failed");}if (!SpeakerIsInitialized()){// Make this call to check if we are using// one or two devices (_twoDevices)bool available = false;if (SpeakerIsAvailable(available) == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," SpeakerIsAvailable() failed");}}char MicrophoneName[128];memset(MicrophoneName,0, sizeof(MicrophoneName));bool bMicrophoneHasName = GetDeviceFriendName(kAudioDevicePropertyScopeInput, _inputDeviceID, MicrophoneName);if (bMicrophoneHasName){const char* LogitechConferenceCamConnect = "ConferenceCam Connect";if (strncmp(MicrophoneName, LogitechConferenceCamConnect, strlen(LogitechConferenceCamConnect)) == 0){SetSpecialMicrophoneFormat(16000,0,true,false);}#ifdef BUILD_FOR_MIMOconst static char* RevolabsAudioDevice[] = {"Bloomberg","Revolabs"};for(int i = 0;i < sizeof(RevolabsAudioDevice)/sizeof(char*); i++){if (strstr(MicrophoneName, RevolabsAudioDevice[i]) != 0){SetSpecialMicrophoneFormat(0,24,false,true);}}#endif#ifdef MUTI_MICROPHONE_SUPPORT_bMutilChannelsMic = false;if (strstr(MicrophoneName, "ZOOM UAC-8") != 0 || strstr(MicrophoneName,"ZOOM UAC-2") != 0 || strstr(MicrophoneName,"Dante Virtual Soundcard") != 0 || strstr(MicrophoneName,"Scarlett 18i20 USB") != 0|| (strstr(MicrophoneName,"UMC1820") != 0) || (strstr(MicrophoneName,"UMC404HD") != 0)){_bMutilChannelsMic = true;}#endif}OSStatus err = noErr;UInt32 size = 0;PaUtil_FlushRingBuffer(_paCaptureBuffer);_captureDelayUs = 0;_captureLatencyUs = 0;_captureDelayUsUpdate = 0;_captureDelayUsPrevious = 0;_captureDeviceIsAlive = 1;_doStopRec = false;// Get current stream descriptionAudioObjectPropertyAddresspropertyAddress = { kAudioDevicePropertyStreamFormat,kAudioDevicePropertyScopeInput, 0 };memset(&_inStreamFormat, 0, sizeof(_inStreamFormat));size = sizeof(_inStreamFormat);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &_inStreamFormat));if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM){logCAMsg(kTraceError, kTraceAudioDevice, _id,"Unacceptable input stream format -> mFormatID",(const char *) &_inStreamFormat.mFormatID);return -1;}if (_inStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,", Too many channels on device (mChannelsPerFrame = %d)",_inStreamFormat.mChannelsPerFrame);return -1;}WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," Input stream format:");WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," mSampleRate = %f, mChannelsPerFrame = %u",_inStreamFormat.mSampleRate, _inStreamFormat.mChannelsPerFrame);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," mBytesPerPacket = %u, mFramesPerPacket = %u",_inStreamFormat.mBytesPerPacket,_inStreamFormat.mFramesPerPacket);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," mBytesPerFrame = %u, mBitsPerChannel = %u",_inStreamFormat.mBytesPerFrame,_inStreamFormat.mBitsPerChannel);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," mFormatFlags = %u, mChannelsPerFrame = %u",_inStreamFormat.mFormatFlags,_inStreamFormat.mChannelsPerFrame);logCAMsg(kTraceWarning, kTraceAudioDevice, _id, "mFormatID",(const char *) &_inStreamFormat.mFormatID);_critSectCb.Enter();// Our preferred format to work with#ifdef MUTI_MICROPHONE_SUPPORTif(_bMutilChannelsMic){_inDesiredFormat.mChannelsPerFrame = _inStreamFormat.mChannelsPerFrame;_recChannels = _inDesiredFormat.mChannelsPerFrame;}else#endif{if (_inStreamFormat.mChannelsPerFrame > 2){_inDesiredFormat.mChannelsPerFrame = _inStreamFormat.mChannelsPerFrame;_recChannels = 1;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mutil-channel recording on this device");}else if (_inStreamFormat.mChannelsPerFrame == 2 && (_recChannels == 2)){_inDesiredFormat.mChannelsPerFrame = 2;}else{// Disable stereo recording when we only have one channel on the device._inDesiredFormat.mChannelsPerFrame = 1;_recChannels = 1;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Stereo recording unavailable on this device");}}if (_ptrAudioBuffer){// Update audio buffer with the selected parameters#ifdef MUTI_MICROPHONE_SUPPORT_ptrAudioBuffer->SetMultiChannelsRecording(_bMutilChannelsMic);#endif_ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);_ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);}_inDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;_inDesiredFormat.mBytesPerPacket = _inDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_inDesiredFormat.mFramesPerPacket = 1;_inDesiredFormat.mBytesPerFrame = _inDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_inDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;_inDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger| kLinearPCMFormatFlagIsPacked;#ifdef WEBRTC_BIG_ENDIAN_inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;#endif_inDesiredFormat.mFormatID = kAudioFormatLinearPCM;WEBRTC_CA_LOG_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,&_captureConverter));_critSectCb.Leave();// First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)// TODO(xians): investigate this block.UInt32 bufByteCount = (UInt32)((_inStreamFormat.mSampleRate / 1000.0)* 10.0 * N_BLOCKS_IO * _inStreamFormat.mChannelsPerFrame* sizeof(Float32));if(bMicrophoneHasName){const char* FireWire410 = "FireWire 410 Multichannel";if (strncmp(MicrophoneName, FireWire410, strlen(FireWire410)) == 0) {bufByteCount *= 2;}}if (_inStreamFormat.mFramesPerPacket != 0){if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0){bufByteCount = ((UInt32)(bufByteCount/ _inStreamFormat.mFramesPerPacket) + 1)* _inStreamFormat.mFramesPerPacket;}}// Ensure the buffer size is within the acceptable range provided by the device.propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;AudioValueRange range;size = sizeof(range);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &range));if (range.mMinimum > bufByteCount){bufByteCount = range.mMinimum;} else if (range.mMaximum < bufByteCount){bufByteCount = range.mMaximum;}WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," bufByteCount = %u,range.mMinimum = %u,range.mMaximum = %u",bufByteCount, range.mMinimum, range.mMaximum);propertyAddress.mSelector = kAudioDevicePropertyBufferSize;size = sizeof(bufByteCount);WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, size, &bufByteCount));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AudioObjectSetPropertyData selector:%d,size:%d",propertyAddress.mSelector,size);// Get capture device latencypropertyAddress.mSelector = kAudioDevicePropertyLatency;UInt32 latency = 0;size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_captureLatencyUs = (UInt32)((1.0e6 * latency)/ _inStreamFormat.mSampleRate);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"capture report delay kAudioDevicePropertyLatency = %d",_captureLatencyUs);#endifWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AudioObjectSetPropertyData selector:%d,size:%d",propertyAddress.mSelector,size);// Get capture stream latencypropertyAddress.mSelector = kAudioDevicePropertyStreams;AudioStreamID stream = 0;size = sizeof(AudioStreamID);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &stream));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AudioObjectSetPropertyData selector:%d,size:%d",propertyAddress.mSelector,size);propertyAddress.mSelector = kAudioStreamPropertyLatency;size = sizeof(UInt32);latency = 0;WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_captureLatencyUs += (UInt32)((1.0e6 * latency)/ _inStreamFormat.mSampleRate);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"capture report delay kAudioStreamPropertyLatency = %d",(UInt32)((1.0e6 * latency)/_inStreamFormat.mSampleRate));#endifWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AudioObjectSetPropertyData selector:%d,size:%d",propertyAddress.mSelector,size);//Siping: fix echo for C920 microphoneif (_captureLatencyUs/1000 > 50){_captureLatencyUs = 0;}// Listen for format changes// TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?if (_critSectFormatChange == NULL){_critSectFormatChange = CriticalSectionWrapper::CreateCriticalSection();}propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID,&propertyAddress, &objectListenerProc, this));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AudioObjectSetPropertyData selector:%d",propertyAddress.mSelector);// Listen for processor overloadspropertyAddress.mSelector = kAudioDeviceProcessorOverload;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID,&propertyAddress, &objectListenerProc, this));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AudioObjectSetPropertyData selector:%d",propertyAddress.mSelector);// listen for volume changes// Some devices (but not many) support a master channel/*propertyAddress.mSelector = kAudioDevicePropertyVolumeScalar;if(AudioObjectHasProperty(_inputDeviceID, &propertyAddress)){WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID, &propertyAddress, &objectListenerProc, this));}else{// Typically the L and R channels are 1 and 2 respectively, but could be differentpropertyAddress.mElement = 1;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID, &propertyAddress, &objectListenerProc, this));propertyAddress.mElement = 2;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID, &propertyAddress, &objectListenerProc, this));}*/_recSameDevice = true;if (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_inputDeviceID,inDeviceIOProc, this, &_inDeviceIOProcID));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"AudioDeviceCreateIOProcID _twoDevices = %d,_inputDeviceID = %d,_inDeviceIOProcID = %d", _twoDevices,_inputDeviceID,_inDeviceIOProcID);}else{WEBRTC_CA_RETURN_ON_ERR(AudioDeviceAddIOProc(_inputDeviceID, inDeviceIOProc,this));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"AudioDeviceAddIOProc _twoDevices = %d,_inputDeviceID = %d,_inDeviceIOProcID = %d", _twoDevices,_inputDeviceID,_inDeviceIOProcID);}// Mark recording side as initialized_recIsInitialized = true;return 0;}WebRtc_Word32 AudioDeviceMac::StartRecording(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (!_recIsInitialized){return -1;}if (_recording){return 0;}if (!_initialized){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Recording worker thread has not been started");return -1;}OSStatus err = noErr;unsigned int threadID(0);if (_captureWorkerThread != NULL){_captureWorkerThread->Start(threadID);}_captureWorkerThreadId = threadID;_need_detect = true;_recWaitErrorCount = 0;#ifdef DEVICE_THREAD_EXCEPTIONRunMicrophoneInfo.Stopped = false;RunMicrophoneInfo.errorCount = 0;#endifif (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_LOG_ERR(AudioDeviceStart(_inputDeviceID, _inDeviceIOProcID));if (err != noErr){#ifdef DEVICE_THREAD_EXCEPTIONRunMicrophoneInfo.Stopped = true;#endifreturn -1;}WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"AudioDeviceStart _twoDevices = %d,_inputDeviceID = %d,_recSameDevice = %d", _twoDevices,_inputDeviceID,_recSameDevice);}else{WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, inDeviceIOProc));}#ifdef DEVICE_THREAD_EXCEPTIONRunMicrophoneInfo.DeviceID = _inputDeviceID;if (AudioDeviceCreateIOProcID != NULL){RunMicrophoneInfo.DeviceIOProcID = _inDeviceIOProcID;}#endif_recordCallbackHappened = false;_recording = true;#ifdef CHECKTIMESTAMPERROR_timestampErrorCount = 0;_bCheckTimestampError = true;#endif_MicrophoneStartTime = mach_absolute_time();return 0;}WebRtc_Word32 AudioDeviceMac::StopRecording(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (!_recIsInitialized){return 0;}_critSect.Leave();if (_captureWorkerThread != NULL){if (!_captureWorkerThread->Stop()){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Timed out waiting for the capture worker thread to ""stop.");if(!_captureWorkerThread->Shutdown()){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Timed out waiting for the capture worker thread to ""Shutdown.");}else{_critSectCb.Enter();if (_critSectFormatChange){delete _critSectFormatChange;_critSectFormatChange = NULL;}_critSectCb.Leave();}}}_critSect.Enter();_recWaitErrorCount = 0;OSStatus err = noErr;// Stop deviceint32_t captureDeviceIsAlive = AtomicGet32(&_captureDeviceIsAlive);{if (_recording /*&& captureDeviceIsAlive == 1*/){_recording = false;_doStopRec = true; // Signal to io proc to stop audio device_critSect.Leave(); // Cannot be under lock, risk of deadlockuint64_t elapsedStartAbsTime = mach_absolute_time() - _MicrophoneStartTime;Nanoseconds elapsedStartNano = AbsoluteToNanoseconds( *(AbsoluteTime *) &elapsedStartAbsTime );uint64_t elapsedStartMs = (* (uint64_t *) &elapsedStartNano) / 1000000;if (!captureDeviceIsAlive || (!_recordCallbackHappened && (elapsedStartMs > MaxNoCallbacktime))|| kEventSignaled != _stopEventRec.Wait(WAIT_THREAD_TERMINAL)){if (m_StopDeviceQueue){dispatch_async(m_StopDeviceQueue, ^{CriticalSectionScoped critScoped(_critSect);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," Timed out stopping the capture IOProc. ""We may have failed to detect a device removal.");if (_doStopRec){if (AudioDeviceCreateIOProcID != NULL){AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," StopRecording AudioDeviceStop _inputDeviceID = %d _outputDeviceID = %d.",_inputDeviceID,_outputDeviceID);}else{AudioDeviceStop(_inputDeviceID,inDeviceIOProc);}if (AudioDeviceDestroyIOProcID != NULL){AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID);}else{AudioDeviceRemoveIOProc(_inputDeviceID, inDeviceIOProc);}_doStopRec = false;}_stopEventRecAgain.Set();});}if (kEventSignaled != _stopEventRecAgain.Wait(WAIT_THREAD_TERMINAL)){//force crashint crashAddress = 120;int* ptr = (int*)crashAddress;*ptr = 0;}}_critSect.Enter();_doStopRec = false;WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id," Recording stopped");}}#ifdef DEVICE_THREAD_EXCEPTIONRunMicrophoneInfo.Stopped = true;RunMicrophoneInfo.errorCount = 0;#endif// Setting this signal will allow the worker thread to be stopped.AtomicSet32(&_captureDeviceIsAlive, 0);_critSectCb.Enter();WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));_critSectCb.Leave();// Remove listeners.AudioObjectPropertyAddresspropertyAddress = { kAudioDevicePropertyStreamFormat,kAudioDevicePropertyScopeInput, 0 };WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,&propertyAddress, &objectListenerProc, this));propertyAddress.mSelector = kAudioDeviceProcessorOverload;WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,&propertyAddress, &objectListenerProc, this));// Some devices (but not many) support a master channel/*propertyAddress.mSelector = kAudioDevicePropertyVolumeScalar;if(AudioObjectHasProperty(_inputDeviceID, &propertyAddress)){WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID, &propertyAddress, &objectListenerProc, this));}else{// Typically the L and R channels are 1 and 2 respectively, but could be differentpropertyAddress.mElement = 1;WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID, &propertyAddress, &objectListenerProc, this));propertyAddress.mElement = 2;WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID, &propertyAddress, &objectListenerProc, this));}*/_recIsInitialized = false;_recording = false;if (_bUseExclusiveMode){_bUseExclusiveMode = false;AdjustMicrophoneSampleRateBaseDeviceMode();}return 0;}class USBDeviceInfo{public:USBDeviceInfo(UInt16 PID, UInt16 VID, UInt32 LocationID): m_pId(PID), m_vId(VID), m_locationId(LocationID){}UInt16 m_pId;UInt16 m_vId;UInt32 m_locationId;};WebRtc_Word32 AudioDeviceMac::SetSpecialMicrophoneFormat(Float64 sampleRate, UInt32 bitsPerChannel, bool bSetSampleRate, bool bSetBitsPerChannel) const{WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "SetSpecialMicrophoneFormat begin");UInt32 size = 0;OSStatus err = noErr;AudioStreamBasicDescription microphoneDefaultStreamFormat;AudioObjectPropertyAddress propertyAddressForFormat = { kAudioDevicePropertyStreamFormat,kAudioDevicePropertyScopeInput, 0 };memset(µphoneDefaultStreamFormat, 0, sizeof(microphoneDefaultStreamFormat));size = sizeof(microphoneDefaultStreamFormat);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddressForFormat, 0, NULL, &size, µphoneDefaultStreamFormat));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "SetSpecialMicrophoneFormat bitsPerChannel = %d, default bitsPerchannel = %d,extension size = %d", bitsPerChannel,microphoneDefaultStreamFormat.mBitsPerChannel,_mUSBExtenderWithAudioIssueVec.size());if (bSetSampleRate && microphoneDefaultStreamFormat.mSampleRate != sampleRate){AudioStreamBasicDescription* p;Boolean ow;int i;UInt32 propertySize=0; //sizeof(p);AudioObjectPropertyAddress propertyAddressForAllFormat = { kAudioDevicePropertyStreamFormats,kAudioDevicePropertyScopeInput, 0 };err = AudioObjectGetPropertyDataSize(_inputDeviceID,&propertyAddressForAllFormat, 0, NULL, &propertySize);if(err == noErr){p = (AudioStreamBasicDescription*)malloc(propertySize);err = AudioObjectGetPropertyData(_inputDeviceID,&propertyAddressForAllFormat, 0, NULL, &propertySize, p);if (err == noErr){int indexForFormat = -1;for(int i=0;i<propertySize/sizeof(AudioStreamBasicDescription);i++){AudioStreamBasicDescription* pp = &(p[i]);if ((pp->mSampleRate == sampleRate) && (pp->mFormatID == kAudioFormatLinearPCM)){indexForFormat = i;break;}}if (indexForFormat != -1 ){err = AudioObjectSetPropertyData(_inputDeviceID,&propertyAddressForFormat,0,NULL,sizeof(AudioStreamBasicDescription),&(p[indexForFormat]));if (err == noErr){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"SetSpecialMicrophoneFormat AdjustMicrophoneSampleRate set microphone format to 16K");}}}free(p);}}if (bSetBitsPerChannel && microphoneDefaultStreamFormat.mBitsPerChannel != bitsPerChannel){bool bHasUSBExtender = false;if(isMacOS10124AndLater()){const UInt16 audioDeviceVendorID[] = {0x2abf};/*const USBDeviceInfo allUSBExtenderDeviceInfo[] ={USBDeviceInfo(0x2512,0x0424,0),USBDeviceInfo(0x6506,0x04b4,0)// ,USBDeviceInfo(2,2,1)// ,USBDeviceInfo(3,3,1)};*/std::vector<USBDeviceInfo> usbExtenderVector;std::vector<USBDeviceInfo> specialDeviceVector;usbExtenderVector.clear();specialDeviceVector.clear();CFMutableDictionaryRef matchingDict;io_iterator_t iter;kern_return_t kr;io_service_t usbDeviceRef;/* set up a matching dictionary for the class */matchingDict = IOServiceMatching(kIOUSBDeviceClassName);if (matchingDict != NULL){kr = IOServiceGetMatchingServices(kIOMasterPortDefault, matchingDict, &iter);if (kr == KERN_SUCCESS){while ((usbDeviceRef = IOIteratorNext(iter))){if (usbDeviceRef){IOUSBDeviceInterface245** usbDeviceInterface = NULL;SInt32 score;IOCFPlugInInterface** plugin = NULL;kern_return_t err;err = IOCreatePlugInInterfaceForService(usbDeviceRef, kIOUSBDeviceUserClientTypeID, kIOCFPlugInInterfaceID, &plugin, &score);if (err == 0 && plugin){err = (*plugin)->QueryInterface(plugin,CFUUIDGetUUIDBytes(kIOUSBDeviceInterfaceID245),(LPVOID*)&usbDeviceInterface);if (err == 0 && usbDeviceInterface){UInt16 PID = 0;UInt16 VID = 0;UInt32 LocationID = 0;err = (*usbDeviceInterface)->GetDeviceVendor(usbDeviceInterface,&VID);err = (*usbDeviceInterface)->GetDeviceProduct(usbDeviceInterface,&PID);err = (*usbDeviceInterface)->GetLocationID(usbDeviceInterface,&LocationID);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "SetSpecialMicrophoneFormat VID = 0x%x, PID = 0x%x,LocationID = 0x%x", VID,PID,LocationID);if (err == 0){for (int i = 0; i < sizeof(audioDeviceVendorID)/sizeof(UInt16); i++){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "SetSpecialMicrophoneFormat 1 , audioDeviceVendorID[i] = 0x%x,i = %d", audioDeviceVendorID[i],i);if (VID == audioDeviceVendorID[i]){specialDeviceVector.push_back(USBDeviceInfo(PID,VID,LocationID));}}for (int i = 0; i < _mUSBExtenderWithAudioIssueVec.size(); i++){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "SetSpecialMicrophoneFormat 2 VID = 0x%x, PID = 0x%x,i = %d", _mUSBExtenderWithAudioIssueVec[i].extenderVID,_mUSBExtenderWithAudioIssueVec[i].extenderPID,i);if (VID == _mUSBExtenderWithAudioIssueVec[i].extenderVID && PID == _mUSBExtenderWithAudioIssueVec[i].extenderPID){usbExtenderVector.push_back(USBDeviceInfo(PID,VID,LocationID));}}}(*usbDeviceInterface)->Release(usbDeviceInterface);}IODestroyPlugInInterface(plugin);}IOObjectRelease(usbDeviceRef);}}}IOObjectRelease(iter);}if (!(specialDeviceVector.empty())&& !(usbExtenderVector.empty())){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"SetSpecialMicrophoneFormat AdjustMicrophoneSampleRate usbExtender.size = %d, specialDeviceVector.size = %d", usbExtenderVector.size(), specialDeviceVector.size());char audioDeviceLocationIDHEX[64];char USBExtenderLocationIDHEX[64];for (int i = 0; i < specialDeviceVector.size(); i++){//0x14240000, 0x14200000//0x14242000, 0x14202000memset(audioDeviceLocationIDHEX, 0, sizeof(audioDeviceLocationIDHEX));sprintf(audioDeviceLocationIDHEX, "0x%x",(unsigned int)specialDeviceVector[i].m_locationId);for (int j = 0; j < usbExtenderVector.size(); j++){memset(USBExtenderLocationIDHEX, 0, sizeof(USBExtenderLocationIDHEX));sprintf(USBExtenderLocationIDHEX, "0x%x",(unsigned int)usbExtenderVector[j].m_locationId);int k = strlen(audioDeviceLocationIDHEX) - 1;for (; k >= 0; k--) {if (audioDeviceLocationIDHEX[k] != '0'){break;}else{audioDeviceLocationIDHEX[k] = '\0';}}k = strlen(USBExtenderLocationIDHEX) - 1;for (; k >= 0; k--) {if (USBExtenderLocationIDHEX[k] != '0'){break;}else{USBExtenderLocationIDHEX[k] = '\0';}}WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"SetSpecialMicrophoneFormat AdjustMicrophoneSampleRate audioDevice's locationID = %s,extender's locationID = %s, k = %d",audioDeviceLocationIDHEX,USBExtenderLocationIDHEX,k);if (NULL != strstr(audioDeviceLocationIDHEX,USBExtenderLocationIDHEX)){bHasUSBExtender = true;break;}}if (bHasUSBExtender){break;}}}}if (!bHasUSBExtender){return 0;}AudioStreamBasicDescription* p;Boolean ow;int i;UInt32 propertySize=0; //sizeof(p);AudioObjectPropertyAddress propertyAddressForAllFormat = { kAudioDevicePropertyStreamFormats,kAudioDevicePropertyScopeInput, 0 };err = AudioObjectGetPropertyDataSize(_inputDeviceID,&propertyAddressForAllFormat, 0, NULL, &propertySize);if(err == noErr){p = (AudioStreamBasicDescription*)malloc(propertySize);err = AudioObjectGetPropertyData(_inputDeviceID,&propertyAddressForAllFormat, 0, NULL, &propertySize, p);if (err == noErr){int indexForFormat = -1;for(int i=0;i<propertySize/sizeof(AudioStreamBasicDescription);i++){AudioStreamBasicDescription* pp = &(p[i]);if ((pp->mBitsPerChannel == bitsPerChannel) && (pp->mFormatID == kAudioFormatLinearPCM)){indexForFormat = i;break;}}if (indexForFormat != -1 ){err = AudioObjectSetPropertyData(_inputDeviceID,&propertyAddressForFormat,0,NULL,sizeof(AudioStreamBasicDescription),&(p[indexForFormat]));if (err == noErr){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"SetSpecialMicrophoneFormat AdjustMicrophoneSampleRate set microphone format to 24bit");}}}free(p);}}return 0;}bool AudioDeviceMac::RecordingIsInitialized() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return (_recIsInitialized);}bool AudioDeviceMac::Recording() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return (_recording);}bool AudioDeviceMac::PlayoutIsInitialized() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return (_playIsInitialized);}WebRtc_Word32 AudioDeviceMac::StartPlayout(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);if (!_playIsInitialized){return -1;}if (_playing){return 0;}OSStatus err = noErr;unsigned int threadID(0);if (_renderWorkerThread != NULL){_renderWorkerThread->Start(threadID);}_renderWorkerThreadId = threadID;_need_detect_play = true;#ifdef DEVICE_THREAD_EXCEPTIONRunSpeakerInfo.Stopped = false;RunSpeakerInfo.errorCount = 0;#endifif (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_LOG_ERR(AudioDeviceStart(_outputDeviceID, _deviceIOProcID));if (err != noErr){#ifdef DEVICE_THREAD_EXCEPTIONRunSpeakerInfo.Stopped = true;#endifreturn -1;}WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"AudioDeviceStart %s,_outputDeviceID = %d,_deviceIOProcID = %d", __FUNCTION__,_outputDeviceID,_deviceIOProcID);}else{WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_outputDeviceID, deviceIOProc));}_playCallbackHappened = false;_playing = true;_playWaitErrorCount = 0;#ifdef DEVICE_THREAD_EXCEPTIONRunSpeakerInfo.DeviceID = _outputDeviceID;if (AudioDeviceCreateIOProcID != NULL){RunSpeakerInfo.DeviceIOProcID = _deviceIOProcID;}#endif#ifdef CHECKTIMESTAMPERROR_timestampErrorCount = 0;_bCheckTimestampError = true;#endif#ifdef BUILD_FOR_MIMO_SpeakerStartTime = mach_absolute_time();AudioDeviceID SystemDefaultSpeakerID = kAudioObjectUnknown;if(GetSystemDefaultPlayDevice(SystemDefaultSpeakerID)){if (/*(SystemDefaultSpeakerID != _zoomDeviceSpeakerID) &&*/ (SystemDefaultSpeakerID != _outputDeviceID)){SetSystemDefaultPlayDevice(_outputDeviceID);WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "StartPlayout SetSystemDefaultPlayDevice");}}#endifreturn 0;}WebRtc_Word32 AudioDeviceMac::StopPlayout(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_critSect);if (!_playIsInitialized){return 0;}OSStatus err = noErr;int32_t renderDeviceIsAlive = AtomicGet32(&_renderDeviceIsAlive);if (_playing /*&& renderDeviceIsAlive == 1*/){// We signal a stop for a shared device even when capturing has not// yet ended. This is to ensure the IOProc will return early as// intended (by checking |_playing|) before accessing resources we// free below (e.g. the render converter).//// In the case of a shared device, the IOProc will verify capturing// has ended before stopping itself._playing = false;_doStop = true; // Signal to io proc to stop audio device_critSect.Leave(); // Cannot be under lock, risk of deadlockuint64_t elapsedStartAbsTime = mach_absolute_time() - _SpeakerStartTime;Nanoseconds elapsedStartNs = AbsoluteToNanoseconds( *(AbsoluteTime *) &elapsedStartAbsTime );uint64_t elapsedStartMs = (* (uint64_t *) &elapsedStartNs) / 1000000;if (!renderDeviceIsAlive || (!_playCallbackHappened && (elapsedStartMs > MaxNoCallbacktime) )|| kEventSignaled != _stopEvent.Wait(WAIT_THREAD_TERMINAL)){if (m_StopDeviceQueue){dispatch_async(m_StopDeviceQueue, ^{CriticalSectionScoped critScoped(_critSect);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," Timed out stopping the render IOProc. ""We may have failed to detect a device removal.");// We assume capturing on a shared device has stopped as well if the// IOProc times out.if (_doStop){if (AudioDeviceCreateIOProcID != NULL){AudioDeviceStop(_outputDeviceID, _deviceIOProcID);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," StopPlayout AudioDeviceStop _inputDeviceID = %d _outputDeviceID = %d.",_inputDeviceID,_outputDeviceID);}else{AudioDeviceStop(_outputDeviceID,deviceIOProc);}if (AudioDeviceDestroyIOProcID != NULL){AudioDeviceDestroyIOProcID(_outputDeviceID,_deviceIOProcID);}else{AudioDeviceRemoveIOProc(_outputDeviceID, deviceIOProc);}_doStop = false;}_stopEventAgain.Set();});}if (kEventSignaled != _stopEventAgain.Wait(WAIT_THREAD_TERMINAL)){//force crashint crashAddress = 120;int* ptr = (int*)crashAddress;*ptr = 0;}}_critSect.Enter();_doStop = false;WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,"Playout stopped");}// Setting this signal will allow the worker thread to be stopped.AtomicSet32(&_renderDeviceIsAlive, 0);_critSect.Leave();if (_renderWorkerThread != NULL){if (!_renderWorkerThread->Stop()){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Timed out waiting for the render worker thread to ""stop.");}}#ifdef DEVICE_THREAD_EXCEPTIONRunSpeakerInfo.Stopped = true;RunSpeakerInfo.errorCount = 0;#endif_critSect.Enter();_critSectCb.Enter();WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));_critSectCb.Leave();// Remove listeners.AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput,0 };WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,&propertyAddress, &objectListenerProc, this));propertyAddress.mSelector = kAudioDeviceProcessorOverload;WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,&propertyAddress, &objectListenerProc, this));// Some devices (but not many) support a master channel/*propertyAddress.mSelector = kAudioDevicePropertyVolumeScalar;if(AudioObjectHasProperty(_outputDeviceID, &propertyAddress)){WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID, &propertyAddress, &objectListenerProc, this));}else{// Typically the L and R channels are 1 and 2 respectively, but could be differentpropertyAddress.mElement = 1;WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID, &propertyAddress, &objectListenerProc, this));propertyAddress.mElement = 2;WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID, &propertyAddress, &objectListenerProc, this));}*/if (_macBookPro){propertyAddress.mSelector = kAudioDevicePropertyDataSource;Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,&propertyAddress);if (hasProperty){WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,&propertyAddress, &objectListenerProc, this));}}_playIsInitialized = false;_playing = false;_playWaitErrorCount = 0;return 0;}WebRtc_Word32 AudioDeviceMac::PlayoutDelay(WebRtc_UWord16& delayMS) const{int32_t renderDelayUs = AtomicGet32(&_msecOnPlaySide);delayMS = static_cast<WebRtc_UWord16> (1e-3 * renderDelayUs + 0.5);return 0;}WebRtc_Word32 AudioDeviceMac::RecordingDelay(WebRtc_UWord16& delayMS) const{int32_t captureDelayUs = AtomicGet32(&_msecOnRecordSide);delayMS = static_cast<WebRtc_UWord16> (1e-3 * captureDelayUs + 0.5);return 0;}WebRtc_Word32 AudioDeviceMac::RecordingTS(WebRtc_UWord64* timestampNS) const{*timestampNS = _recDataInputTimeNs;return 0;}bool AudioDeviceMac::Playing() const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);return (_playing);}WebRtc_Word32 AudioDeviceMac::SetPlayoutBuffer(const AudioDeviceModule::BufferType type,WebRtc_UWord16 sizeMS){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetPlayoutBuffer(type=%u, sizeMS=%u)", type,sizeMS);if (type != AudioDeviceModule::kFixedBufferSize){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Adaptive buffer size not supported on this platform");return -1;}_playBufType = type;_playBufDelayFixed = sizeMS;return 0;}WebRtc_Word32 AudioDeviceMac::PlayoutBuffer(AudioDeviceModule::BufferType& type,WebRtc_UWord16& sizeMS) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);type = _playBufType;sizeMS = _playBufDelayFixed;return 0;}// Not implemented for Mac.WebRtc_Word32 AudioDeviceMac::CPULoad(WebRtc_UWord16& /*load*/) const{WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," API call not supported on this platform");return -1;}bool AudioDeviceMac::PlayoutWarning() const{return (_playWarning > 0);}WebRtc_UWord16 AudioDeviceMac::PlayoutError() const{return (_playError);}bool AudioDeviceMac::RecordingWarning() const{return (_recWarning > 0);}WebRtc_UWord16 AudioDeviceMac::RecordingError() const{return (_recError);}WebRtc_UWord16 AudioDeviceMac::LoopbackRecordingError() const{return _loopbackrecError;}void AudioDeviceMac::ClearPlayoutWarning(){_playWarning = 0;}void AudioDeviceMac::ClearPlayoutError(){_playError = 0;}void AudioDeviceMac::ClearRecordingWarning(){_recWarning = 0;}void AudioDeviceMac::ClearRecordingError(){_recError = 0;}void AudioDeviceMac::ClearLoopbackRecordingError(){_loopbackrecError = 0;}// ============================================================================// Private Methods// ============================================================================#ifdef BUILD_FOR_MIMOWebRtc_Word32 AudioDeviceMac::IsLowPriorityDevice(const AudioObjectPropertyScope scope,AudioDeviceID DeviceId){UInt32 transportType = 0;AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyTransportType,scope, 0 };UInt32 size = sizeof(UInt32);OSStatus err = AudioObjectGetPropertyData(DeviceId,&propertyAddress, 0, NULL, &size, &transportType);if (err == noErr){if (transportType == 'usb '){return 0;}else if (transportType == 'hdmi'){return 1;}else if(transportType == 'bltn'){return 2;}else{return 3;}}return 3;}WebRtc_Word32AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,AudioDeviceID scopedDeviceIds[],const WebRtc_UWord32 deviceListLength,WebRtc_UWord8 &ZoomAudioDeviceNum,bool bCheckZoomAudioDeviceNum){ZoomAudioDeviceNum = 0;OSStatus err = noErr;AudioObjectPropertyAddress propertyAddress = {kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };UInt32 size = 0;WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size));if (size == 0){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"No devices");return 0;}AudioDeviceID* deviceIds = (AudioDeviceID*) malloc(size);UInt32 numberDevices = size / sizeof(AudioDeviceID);AudioBufferList* bufferList = NULL;UInt32 numberScopedDevices = 0;AudioDeviceID localScopedDeviceIds[MaxNumberDevices];UInt32 DevicePriorityList[MaxNumberDevices];UInt32 scopedDeviceIdsIdx = 0;memset(DevicePriorityList, 0, sizeof(DevicePriorityList));bool listOK = true;WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size, deviceIds));if (err != noErr){listOK = false;} else{propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;propertyAddress.mScope = scope;propertyAddress.mElement = 0;for (UInt32 i = 0; i < numberDevices; i++){// Check for input channelsWEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(deviceIds[i],&propertyAddress, 0, NULL, &size));if (err == kAudioHardwareBadDeviceError){// This device doesn't actually exist; continue iterating.continue;} else if (err != noErr){listOK = false;break;}bufferList = (AudioBufferList*) malloc(size);WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(deviceIds[i],&propertyAddress, 0, NULL, &size, bufferList));if (err != noErr){listOK = false;break;}if (bufferList->mNumberBuffers > 0){if (numberScopedDevices >= deviceListLength){WEBRTC_TRACE(kTraceError,kTraceAudioDevice, _id,"Device list is not long enough");listOK = false;break;}if(deviceIds[i] != kAudioDeviceUnknown){size = 0;DevicePriorityList[numberScopedDevices] = 1 + IsLowPriorityDevice(scope, deviceIds[i]);AudioObjectPropertyAddress propertyAddressTemp = {kAudioDevicePropertyStreamConfiguration,(scope == kAudioDevicePropertyScopeOutput) ? kAudioDevicePropertyScopeInput : kAudioDevicePropertyScopeOutput,0 };err = AudioObjectGetPropertyDataSize(deviceIds[i],&propertyAddressTemp, 0, NULL, &size);if (err == noErr && size > 0){AudioBufferList* bufferListTmp = NULL;bufferListTmp = (AudioBufferList*) malloc(size);if(noErr == AudioObjectGetPropertyData(deviceIds[i],&propertyAddressTemp, 0, NULL, &size, bufferListTmp)){if (bufferListTmp->mNumberBuffers > 0){DevicePriorityList[numberScopedDevices] -= 1;}}if (bufferListTmp){free(bufferListTmp);}}WEBRTC_TRACE(kTraceInfo,kTraceAudioDevice, _id,"index = %d, DeviceId = %d, scope = %d, priority = %d", i,deviceIds[i],(scope == kAudioDevicePropertyScopeOutput) ? 0 : 1 ,DevicePriorityList[numberScopedDevices]);localScopedDeviceIds[numberScopedDevices] = deviceIds[i];numberScopedDevices++;}}free(bufferList);bufferList = NULL;} // for}if (!listOK){if (deviceIds){free(deviceIds);deviceIds = NULL;}if (bufferList){free(bufferList);bufferList = NULL;}return -1;}// Happy endingif (deviceIds){free(deviceIds);deviceIds = NULL;}if (numberScopedDevices == 0){return 0;}for (UInt32 DevicePriority = 0; DevicePriority < 5; DevicePriority++){for (int i = 0; i < numberScopedDevices; i++){if (DevicePriority == DevicePriorityList[i]){scopedDeviceIds[scopedDeviceIdsIdx] = localScopedDeviceIds[i];scopedDeviceIdsIdx++;}}}if(bCheckZoomAudioDeviceNum){char deviceFriendName[kAdmMaxDeviceNameSize];int i = 0;for (; i < numberScopedDevices; i++){memset(deviceFriendName, 0, sizeof(deviceFriendName));GetDeviceFriendName(scope, scopedDeviceIds[i], deviceFriendName);if ((strstr(deviceFriendName, ZoomAudioDeviceName2) != 0) || (strstr(deviceFriendName,BlackmagicAudioName) != 0)/* || (strstr(deviceFriendName,MagewellAudioName) != 0)*/|| (strncmp(deviceFriendName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0)){ZoomAudioDeviceNum++;}}}return numberScopedDevices;}#elseWebRtc_Word32 AudioDeviceMac::IsLowPriorityDevice(const AudioObjectPropertyScope scope,AudioDeviceID DeviceId){UInt32 transportType = 0;AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyTransportType,scope, 0 };UInt32 size = sizeof(UInt32);OSStatus err = AudioObjectGetPropertyData(DeviceId,&propertyAddress, 0, NULL, &size, &transportType);if (err == noErr){if (transportType == 'virt' || transportType == 'ntwk' || transportType == 'airp' || transportType == 0){return 1;}}if (scope == kAudioDevicePropertyScopeInput){char deviceFriendName[kAdmMaxDeviceNameSize];memset(deviceFriendName, 0, sizeof(deviceFriendName));const char* BuiltInAudioName = "Built-in Input";GetDeviceFriendName(scope, DeviceId, deviceFriendName);if (strncmp(deviceFriendName, BuiltInAudioName,strlen(BuiltInAudioName)) == 0){return 1;}}return 0;}WebRtc_Word32AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,AudioDeviceID scopedDeviceIds[],const WebRtc_UWord32 deviceListLength,WebRtc_UWord8 &ZoomAudioDeviceNum,bool bCheckZoomAudioDeviceNum){ZoomAudioDeviceNum = 0;OSStatus err = noErr;AudioObjectPropertyAddress propertyAddress = {kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };UInt32 size = 0;WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size));if (size == 0){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"No devices");return 0;}AudioDeviceID* deviceIds = (AudioDeviceID*) malloc(size);UInt32 numberDevices = size / sizeof(AudioDeviceID);WEBRTC_TRACE(kTraceWarning,kTraceAudioDevice,_id,"GetNumberDevices, size = %d, numberDevices = %d",size,numberDevices);AudioBufferList* bufferList = NULL;UInt32 numberScopedDevices = 0;AudioDeviceID localScopedDeviceIds[MaxNumberDevices];WebRtc_Word32 bLowPriorityDevice[MaxNumberDevices];UInt32 scopedDeviceIdsIdx = 0;memset(bLowPriorityDevice, 0, sizeof(bLowPriorityDevice));// First check if there is a default device and list itUInt32 hardwareProperty = 0;if (scope == kAudioDevicePropertyScopeOutput){hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;} else{hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;}AudioObjectPropertyAddresspropertyAddressDefault = { hardwareProperty,kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };AudioDeviceID usedID = kAudioDeviceUnknown;UInt32 uintSize = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddressDefault, 0, NULL, &uintSize, &usedID));if (usedID != kAudioDeviceUnknown){localScopedDeviceIds[numberScopedDevices] = usedID;bLowPriorityDevice[numberScopedDevices] = IsLowPriorityDevice(scope, usedID);numberScopedDevices++;} else{WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"GetNumberDevices(): Default device unknown");}// Then list the rest of the devicesbool listOK = true;WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size, deviceIds));if (err != noErr){listOK = false;} else{propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;propertyAddress.mScope = scope;propertyAddress.mElement = 0;for (UInt32 i = 0; i < numberDevices; i++){WEBRTC_TRACE(kTraceWarning,kTraceAudioDevice,_id,"GetNumberDevices i = %d, id = %d,numberDevices = %d,numberScopedDevices = %d",i,deviceIds[i],numberDevices,numberScopedDevices);// Check for input channelsWEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(deviceIds[i],&propertyAddress, 0, NULL, &size));if (err == kAudioHardwareBadDeviceError){// This device doesn't actually exist; continue iterating.continue;} else if (err != noErr){listOK = false;break;}bufferList = (AudioBufferList*) malloc(size);WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(deviceIds[i],&propertyAddress, 0, NULL, &size, bufferList));if (err != noErr){listOK = false;break;}if (bufferList->mNumberBuffers > 0){if (numberScopedDevices >= deviceListLength){WEBRTC_TRACE(kTraceError,kTraceAudioDevice, _id,"Device list is not long enough");listOK = false;break;}if((deviceIds[i] != kAudioDeviceUnknown) && (deviceIds[i] != usedID)){localScopedDeviceIds[numberScopedDevices] = deviceIds[i];bLowPriorityDevice[numberScopedDevices] = IsLowPriorityDevice(scope, deviceIds[i]);numberScopedDevices++;}}free(bufferList);bufferList = NULL;} // for}if (!listOK){if (deviceIds){free(deviceIds);deviceIds = NULL;}if (bufferList){free(bufferList);bufferList = NULL;}return -1;}// Happy endingif (deviceIds){free(deviceIds);deviceIds = NULL;}if (numberScopedDevices == 0){return 0;}for (int i = 0; i < numberScopedDevices; i++){if (0 == bLowPriorityDevice[i]){scopedDeviceIds[scopedDeviceIdsIdx] = localScopedDeviceIds[i];scopedDeviceIdsIdx++;}}for (int i = 0; i < numberScopedDevices; i++){if (1 == bLowPriorityDevice[i]){scopedDeviceIds[scopedDeviceIdsIdx] = localScopedDeviceIds[i];scopedDeviceIdsIdx++;}}if(bCheckZoomAudioDeviceNum){char deviceFriendName[kAdmMaxDeviceNameSize];int i = 0;for (; i < numberScopedDevices; i++){memset(deviceFriendName, 0, sizeof(deviceFriendName));GetDeviceFriendName(scope, scopedDeviceIds[i], deviceFriendName);if (strstr(deviceFriendName, ZoomAudioDeviceName2) != 0 || (strncmp(deviceFriendName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0)){ZoomAudioDeviceNum++;}}}return numberScopedDevices;}#endifWebRtc_Word32AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope,WebRtc_UWord16 index,char* name, char* deviceID){OSStatus err = noErr;UInt32 len = kAdmMaxDeviceNameSize;AudioDeviceID deviceIds[MaxNumberDevices];char deviceNames[MaxNumberDevices][kAdmMaxDeviceNameSize];WebRtc_UWord8 ZoomAudioDeviceNum = 0;int numberDevices = GetNumberDevices(scope, deviceIds, MaxNumberDevices,ZoomAudioDeviceNum);if (numberDevices < 0){return -1;} else if (numberDevices == 0){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"No devices");return -1;}if (index == (WebRtc_UWord16)-1){index = 0;AudioDeviceID usedID = kAudioDeviceUnknown;// Check if there is a default devicebool isDefaultDevice = false;UInt32 hardwareProperty = 0;if (scope == kAudioDevicePropertyScopeOutput){hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;} else{hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;}AudioObjectPropertyAddress propertyAddress = { hardwareProperty,kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };UInt32 size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size, &usedID));if (usedID == kAudioDeviceUnknown){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"GetDeviceName(): Default device unknown");} else{for (int i = 0; i < numberDevices; i++){if (usedID == deviceIds[i]){index = i;break;}}isDefaultDevice = true;}}else if(index >= numberDevices){return -1;}// If the number is below the number of devices, assume it's "WEBRTC ID"// otherwise assume it's a CoreAudio IDAudioDeviceID usedID = deviceIds[index];AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0 };for (int i = 0; i < numberDevices; i++){UInt32 len = kAdmMaxDeviceNameSize;AudioObjectGetPropertyData( deviceIds[i],&propertyAddress, 0, NULL, &len, deviceNames[i]);}/*if (isDefaultDevice){if (scope == kAudioDevicePropertyScopeOutput){sprintf(name, "Default Speaker");} else{sprintf(name, "Default Microphone");}}else*/UInt32 transportType = 0;AudioObjectPropertyAddress propertyAddresstransportType = { kAudioDevicePropertyTransportType,scope, 0 };UInt32 size = sizeof(UInt32);AudioObjectGetPropertyData(usedID,&propertyAddresstransportType, 0, NULL, &size, &transportType);{if (index < numberDevices){memcpy(name, deviceNames[index], sizeof(deviceNames[index]));}uint16_t sameDeviceNameCount = 0;for (int i = 0; i < index; i++){if (strcmp(name,deviceNames[i]) == 0){sameDeviceNameCount++;}}if (sameDeviceNameCount != 0){char sourceName[128];memset(sourceName,0,128);sprintf(sourceName,"#%d",sameDeviceNameCount);if ((strlen(name) + strlen(sourceName)) < kAdmMaxDeviceNameSize){strcat(name,sourceName);}}if (transportType == 'bltn'){memcpy(deviceID,name,kAdmMaxDeviceNameSize);}propertyAddress.mSelector = kAudioDevicePropertyDataSource;Boolean hasProperty = AudioObjectHasProperty(usedID,&propertyAddress);if(hasProperty){UInt32 dataSource = 0;UInt32 size = sizeof(dataSource);if(noErr == AudioObjectGetPropertyData(usedID,&propertyAddress, 0, NULL, &size, &dataSource)){AudioValueTranslation trans;CFStringRef str = NULL;Boolean ok;trans.mInputData = &dataSource;trans.mInputDataSize = sizeof(UInt32);trans.mOutputData = &str;trans.mOutputDataSize = sizeof(CFStringRef);propertyAddress.mSelector = kAudioDevicePropertyDataSourceNameForIDCFString;size = sizeof(AudioValueTranslation);if(AudioObjectGetPropertyData(usedID,&propertyAddress,0,NULL,&size,&trans)==noErr){char sourceName[128];if(str != NULL && CFStringGetCString(str, sourceName, 128, kCFStringEncodingUTF8)){if ((strlen(name) + strlen(sourceName) + 3) < kAdmMaxDeviceNameSize){strcat(name, " (");strcat(name, sourceName);strcat(name, ")");}}}if(str)CFRelease(str);}}}name[kAdmMaxDeviceNameSize - 1] = '\0';if (transportType != 'bltn'){memcpy(deviceID,name,kAdmMaxDeviceNameSize);}/// mac change id when usb device plug out and in. so here use name/*if( deviceID ){sprintf(deviceID, "%d", (int)usedID);}*/return 0;}WebRtc_Word32 AudioDeviceMac::InitDevice(WebRtc_UWord16 userDeviceIndex,AudioDeviceID& deviceId,const bool isInput){OSStatus err = noErr;UInt32 size = 0;AudioObjectPropertyScope deviceScope;AudioObjectPropertySelector defaultDeviceSelector;AudioDeviceID deviceIds[MaxNumberDevices];if (isInput){deviceScope = kAudioDevicePropertyScopeInput;defaultDeviceSelector = kAudioHardwarePropertyDefaultInputDevice;} else{deviceScope = kAudioDevicePropertyScopeOutput;defaultDeviceSelector = kAudioHardwarePropertyDefaultOutputDevice;}AudioObjectPropertyAddresspropertyAddress = { defaultDeviceSelector,kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };/*// Get the actual device IDsWebRtc_UWord8 ZoomAudioDeviceNum = 0;int numberDevices = GetNumberDevices(deviceScope, deviceIds,MaxNumberDevices,ZoomAudioDeviceNum);if (numberDevices > 0){numberDevices = CheckAndRemoveZoomDevice(deviceScope, deviceIds, numberDevices);}if (numberDevices < 0){return -1;} else if (numberDevices == 0){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"InitDevice(): No devices");return -1;}*/char deviceGUIDs1[MaxNumberDevices][kAdmMaxDeviceNameSize];char deviceNames1[MaxNumberDevices][kAdmMaxDeviceNameSize];int numberDevices = MaxNumberDevices;AllRelistDevice(deviceScope,deviceNames1,deviceGUIDs1,deviceIds,numberDevices);if(numberDevices <= 0){return -1;}bool isDefaultDevice = false;deviceId = kAudioDeviceUnknown;if (userDeviceIndex == (WebRtc_UWord16)-1){userDeviceIndex = 0;// Try to use default system devicesize = sizeof(AudioDeviceID);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size, &deviceId));if (deviceId == kAudioDeviceUnknown){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," No default device exists");}else{if (_SystemDefaultSpeakerID != kAudioDeviceUnknown){char devName[128];memset(devName, 0, sizeof(devName));if(GetDeviceFriendName(deviceScope,deviceId,devName)){if (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0){deviceId = _SystemDefaultSpeakerID;}}}isDefaultDevice = true;for (int i = 0; i < numberDevices; i++){if (deviceId == deviceIds[i]){userDeviceIndex = i;break;}}}}else if(userDeviceIndex >= numberDevices || userDeviceIndex < 0){userDeviceIndex = 0;}deviceId = deviceIds[userDeviceIndex];// Obtain device name and manufacturer for logging.// Also use this as a test to ensure a user-set device ID is valid./*char devName[128];char devManf[128];char devGUID[128];memset(devName, 0, sizeof(devName));memset(devManf, 0, sizeof(devManf));memset(devGUID, 0, sizeof(devGUID));WebRtc_UWord16 deviceIndex = userDeviceIndex;CheckAndIncreaseZoomDevice(deviceScope, numberDevices,deviceIndex);GetDeviceName(deviceScope,deviceIndex,devName,devGUID);*//*propertyAddress.mSelector = kAudioDevicePropertyDeviceName;propertyAddress.mScope = deviceScope;propertyAddress.mElement = 0;size = sizeof(devName);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,&propertyAddress, 0, NULL, &size, devName));propertyAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;size = sizeof(devManf);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,&propertyAddress, 0, NULL, &size, devManf));*/if (isInput){memcpy(_inputDevName,deviceNames1[userDeviceIndex],sizeof(_inputDevName));memcpy(_inputDevGuid,deviceGUIDs1[userDeviceIndex],sizeof(_inputDevGuid));WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," Input device: %s %s", _inputDevName, _inputDevGuid);} else{memcpy(_outputDevName,deviceNames1[userDeviceIndex],sizeof(_outputDevName));memcpy(_outputDevGuid,deviceGUIDs1[userDeviceIndex],sizeof(_outputDevGuid));WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," Output device: %s %s", _outputDevName, _outputDevGuid);}return 0;}OSStatus AudioDeviceMac::objectListenerProc(AudioObjectID objectId,UInt32 numberAddresses,const AudioObjectPropertyAddress addresses[],void* clientData){AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;assert(ptrThis != NULL);ptrThis->implObjectListenerProc(objectId, numberAddresses, addresses);// AudioObjectPropertyListenerProc functions are supposed to return 0return 0;}OSStatus AudioDeviceMac::implObjectListenerProc(const AudioObjectID objectId,const UInt32 numberAddresses,const AudioObjectPropertyAddress addresses[]){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"AudioDeviceMac::implObjectListenerProc()");for (UInt32 i = 0; i < numberAddresses; i++){if (addresses[i].mSelector == kAudioHardwarePropertyDevices){HandleDeviceChange();} else if (addresses[i].mSelector == kAudioDevicePropertyStreamFormat){HandleStreamFormatChange(objectId, addresses[i]);} else if (addresses[i].mSelector == kAudioDevicePropertyDataSource){HandleDataSourceChange(objectId, addresses[i]);} else if (addresses[i].mSelector == kAudioDeviceProcessorOverload){HandleProcessorOverload(addresses[i]);}else if (addresses[i].mSelector == kAudioDevicePropertyVolumeScalar){HandleVolumeChange(objectId,addresses[i]);}}return 0;}WebRtc_Word32 AudioDeviceMac::HandleDeviceChange(){OSStatus err = noErr;WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,"kAudioHardwarePropertyDevices");// A device has changed. Check if our registered devices have been removed.// Ensure the devices have been initialized, meaning the IDs are valid.if (MicrophoneIsInitialized()){AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceIsAlive,kAudioDevicePropertyScopeInput, 0 };UInt32 deviceIsAlive = 1;UInt32 size = sizeof(UInt32);err = AudioObjectGetPropertyData(_inputDeviceID, &propertyAddress, 0,NULL, &size, &deviceIsAlive);if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Capture device is not alive (probably removed)");AtomicSet32(&_captureDeviceIsAlive, 0);_mixerManager.CloseMicrophone();if (_recError == 1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,_id, " pending recording error exists");}// _recError = 1; // triggers callback from module process thread} else if (err != noErr){logCAMsg(kTraceError, kTraceAudioDevice, _id,"Error in AudioDeviceGetProperty()", (const char*) &err);return -1;}}if (SpeakerIsInitialized()){AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceIsAlive,kAudioDevicePropertyScopeOutput, 0 };UInt32 deviceIsAlive = 1;UInt32 size = sizeof(UInt32);err = AudioObjectGetPropertyData(_outputDeviceID, &propertyAddress, 0,NULL, &size, &deviceIsAlive);if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Render device is not alive (probably removed)");AtomicSet32(&_renderDeviceIsAlive, 0);_mixerManager.CloseSpeaker();if (_playError == 1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,_id, " pending playout error exists");}// _playError = 1; // triggers callback from module process thread} else if (err != noErr){logCAMsg(kTraceError, kTraceAudioDevice, _id,"Error in AudioDeviceGetProperty()", (const char*) &err);return -1;}}return 0;}WebRtc_Word32 AudioDeviceMac::HandleStreamFormatChange(const AudioObjectID objectId,const AudioObjectPropertyAddress propertyAddress){OSStatus err = noErr;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Stream format changed");if (objectId != _inputDeviceID && objectId != _outputDeviceID){return 0;}// Get the new device formatAudioStreamBasicDescription streamFormat;UInt32 size = sizeof(streamFormat);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,&propertyAddress, 0, NULL, &size, &streamFormat));if (streamFormat.mFormatID != kAudioFormatLinearPCM){logCAMsg(kTraceError, kTraceAudioDevice, _id,"Unacceptable input stream format -> mFormatID",(const char *) &streamFormat.mFormatID);return -1;}if (streamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"Too many channels on device (mChannelsPerFrame = %d)",streamFormat.mChannelsPerFrame);return -1;}WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Stream format:");WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mSampleRate = %f, mChannelsPerFrame = %u",streamFormat.mSampleRate, streamFormat.mChannelsPerFrame);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mBytesPerPacket = %u, mFramesPerPacket = %u",streamFormat.mBytesPerPacket, streamFormat.mFramesPerPacket);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mBytesPerFrame = %u, mBitsPerChannel = %u",streamFormat.mBytesPerFrame, streamFormat.mBitsPerChannel);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mFormatFlags = %u, mChannelsPerFrame = %u",streamFormat.mFormatFlags, streamFormat.mChannelsPerFrame);logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",(const char *) &streamFormat.mFormatID);if (propertyAddress.mScope == kAudioDevicePropertyScopeInput){_critSectCb.Enter();if (_critSectFormatChange){_critSectFormatChange->Enter();}memcpy(&_inStreamFormat, &streamFormat, sizeof(streamFormat));#ifdef MUTI_MICROPHONE_SUPPORTif(_bMutilChannelsMic){_inDesiredFormat.mChannelsPerFrame = _inStreamFormat.mChannelsPerFrame;_recChannels = _inDesiredFormat.mChannelsPerFrame;}else#endif{if (_inStreamFormat.mChannelsPerFrame > 2){_inDesiredFormat.mChannelsPerFrame = _inStreamFormat.mChannelsPerFrame;_recChannels = 1;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"mutil-channel recording on this device");}else if (_inStreamFormat.mChannelsPerFrame == 2 && (_recChannels == 2)){_inDesiredFormat.mChannelsPerFrame = 2;}else{// Disable stereo recording when we only have one channel on the device._inDesiredFormat.mChannelsPerFrame = 1;_recChannels = 1;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Stereo recording unavailable on this device");}}if (_ptrAudioBuffer){// Update audio buffer with the selected parameters_ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);_ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);}_inDesiredFormat.mBytesPerPacket = _inDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_inDesiredFormat.mBytesPerFrame = _inDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_inDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger| kLinearPCMFormatFlagIsPacked;#ifdef WEBRTC_BIG_ENDIAN_inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;#endif_inDesiredFormat.mFormatID = kAudioFormatLinearPCM;// Recreate the converter with the new format// TODO(xians): make this thread safeWEBRTC_CA_LOG_ERR(AudioConverterDispose(_captureConverter));WEBRTC_CA_LOG_ERR(AudioConverterNew(&streamFormat, &_inDesiredFormat,&_captureConverter));if (_critSectFormatChange){_critSectFormatChange->Leave();}UInt32 bufByteCount = (UInt32)((_inStreamFormat.mSampleRate / 1000.0)* 10.0 * N_BLOCKS_IO * _inStreamFormat.mChannelsPerFrame* sizeof(Float32));if (_inStreamFormat.mFramesPerPacket != 0){if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0){bufByteCount = ((UInt32)(bufByteCount/ _inStreamFormat.mFramesPerPacket) + 1)* _inStreamFormat.mFramesPerPacket;}}// Ensure the buffer size is within the acceptable range provided by the device.AudioObjectPropertyAddresspropertyAddress = { kAudioDevicePropertyDataSource,kAudioDevicePropertyScopeInput, 0 };propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;AudioValueRange range;size = sizeof(range);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &range));if (range.mMinimum > bufByteCount){bufByteCount = range.mMinimum;} else if (range.mMaximum < bufByteCount){bufByteCount = range.mMaximum;}propertyAddress.mSelector = kAudioDevicePropertyBufferSize;size = sizeof(bufByteCount);WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, size, &bufByteCount));// Get capture device latencypropertyAddress.mSelector = kAudioDevicePropertyLatency;UInt32 latency = 0;size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_captureLatencyUs = (UInt32)((1.0e6 * latency)/ _inStreamFormat.mSampleRate);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"capture report delay kAudioDevicePropertyLatency = %d",_captureLatencyUs);#endif// Get capture stream latencypropertyAddress.mSelector = kAudioDevicePropertyStreams;AudioStreamID stream = 0;size = sizeof(AudioStreamID);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &stream));propertyAddress.mSelector = kAudioStreamPropertyLatency;size = sizeof(UInt32);latency = 0;WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_captureLatencyUs += (UInt32)((1.0e6 * latency)/ _inStreamFormat.mSampleRate);if (_captureLatencyUs/1000 > 50){_captureLatencyUs = 0;}#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"capture report delay kAudioStreamPropertyLatency = %d",(UInt32)((1.0e6 * latency)/ _inStreamFormat.mSampleRate));#endif_critSectCb.Leave();} else{memcpy(&_outStreamFormat, &streamFormat, sizeof(streamFormat));_critSectCb.Enter();_critSectPlayFormatChange.Enter();if (_outStreamFormat.mChannelsPerFrame >= 2 && (_playChannels == 2)){_outDesiredFormat.mChannelsPerFrame = 2;} else{// Disable stereo playout when we only have one channel on the device._outDesiredFormat.mChannelsPerFrame = 1;_playChannels = 1;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"Stereo playout unavailable on this device");}if (_ptrAudioBuffer){// Update audio buffer with the selected parameters_ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);_ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);}_renderDelayOffsetSamples = _renderBufSizeSamples - N_BUFFERS_OUT* ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;_outDesiredFormat.mBytesPerPacket = _outDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_outDesiredFormat.mBytesPerFrame = _outDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_outDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger| kLinearPCMFormatFlagIsPacked;#ifdef WEBRTC_BIG_ENDIAN_outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;#endif_outDesiredFormat.mFormatID = kAudioFormatLinearPCM;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "play Stream format changed");// Recreate the converter with the new format// TODO(xians): make this thread safeWEBRTC_CA_LOG_ERR(AudioConverterDispose(_renderConverter));WEBRTC_CA_LOG_ERR(AudioConverterNew(&_outDesiredFormat, &streamFormat,&_renderConverter));UInt32 bufByteCount = (UInt32)((_outStreamFormat.mSampleRate / 1000.0)* _playBufDelayFixed * _outStreamFormat.mChannelsPerFrame* sizeof(Float32));if (_outStreamFormat.mFramesPerPacket != 0){if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0){bufByteCount = ((UInt32)(bufByteCount/ _outStreamFormat.mFramesPerPacket) + 1)* _outStreamFormat.mFramesPerPacket;}}AudioObjectPropertyAddresspropertyAddress = { kAudioDevicePropertyDataSource,kAudioDevicePropertyScopeOutput, 0 };// Ensure the buffer size is within the acceptable range provided by the device.propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;AudioValueRange range;size = sizeof(range);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &range));if (range.mMinimum > bufByteCount){bufByteCount = range.mMinimum;} else if (range.mMaximum < bufByteCount){bufByteCount = range.mMaximum;}propertyAddress.mSelector = kAudioDevicePropertyBufferSize;size = sizeof(bufByteCount);WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, size, &bufByteCount));// Get render device latencypropertyAddress.mSelector = kAudioDevicePropertyLatency;UInt32 latency = 0;size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_renderLatencyUs = (WebRtc_UWord32) ((1.0e6 * latency)/ _outStreamFormat.mSampleRate);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"play report delay kAudioDevicePropertyLatency = %d",_renderLatencyUs);#endif// Get render stream latencypropertyAddress.mSelector = kAudioDevicePropertyStreams;AudioStreamID stream = 0;size = sizeof(AudioStreamID);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &stream));propertyAddress.mSelector = kAudioStreamPropertyLatency;size = sizeof(UInt32);latency = 0;WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,&propertyAddress, 0, NULL, &size, &latency));_renderLatencyUs += (WebRtc_UWord32) ((1.0e6 * latency)/ _outStreamFormat.mSampleRate);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"play report delay kAudioStreamPropertyLatency = %d",(WebRtc_UWord32) ((1.0e6 * latency)/ _outStreamFormat.mSampleRate));#endifif (_renderLatencyUs/1000 > 50){_renderLatencyUs = 0;}_critSectPlayFormatChange.Leave();_critSectCb.Leave();}return 0;}WebRtc_Word32 AudioDeviceMac::HandleDataSourceChange(const AudioObjectID objectId,const AudioObjectPropertyAddress propertyAddress){OSStatus err = noErr;if (_macBookPro && propertyAddress.mScope== kAudioDevicePropertyScopeOutput){WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,"Data source changed");_macBookProPanRight = false;UInt32 dataSource = 0;UInt32 size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,&propertyAddress, 0, NULL, &size, &dataSource));if (dataSource == 'ispk'){// _macBookProPanRight = true;beep();//try to fix the bug that Internal Speaker stop work when plug out the Headphone from audio jack on MacBookPro installed OSX10.8WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"MacBook Pro using internal speakers; stereo panning right");} else{WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"MacBook Pro not using internal speakers");}}return 0;}WebRtc_Word32 AudioDeviceMac::HandleProcessorOverload(const AudioObjectPropertyAddress propertyAddress){// TODO(xians): we probably want to notify the user in some way of the// overload. However, the Windows interpretations of these errors seem to// be more severe than what ProcessorOverload is thrown for.//// We don't log the notification, as it's sent from the HAL's IO thread. We// don't want to slow it down even further.if (propertyAddress.mScope == kAudioDevicePropertyScopeInput){//WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Capture processor// overload");//_callback->ProblemIsReported(// SndCardStreamObserver::ERecordingProblem);} else{//WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,// "Render processor overload");//_callback->ProblemIsReported(// SndCardStreamObserver::EPlaybackProblem);}return 0;}WebRtc_Word32 AudioDeviceMac::HandleVolumeChange(const AudioObjectID objectId,const AudioObjectPropertyAddress propertyAddress){OSStatus err = noErr;float volume = 0;UInt32 dataSize = sizeof(volume);err = AudioObjectGetPropertyData(objectId, &propertyAddress, 0, NULL, &dataSize, &volume);if(noErr == err){CriticalSectionScoped lock(_critSectNotify);if (_pVolumeChangeNotify){if (propertyAddress.mScope == kAudioDevicePropertyScopeOutput){_pVolumeChangeNotify->OnRenderVolumeChange(volume *255,false,IAudioVolumeChangeNotify::kSystem);}else if (propertyAddress.mScope == kAudioDevicePropertyScopeInput){_pVolumeChangeNotify->OnCaptureVolumeChange(volume *255,false,IAudioVolumeChangeNotify::kSystem);}}}return 0;}// ============================================================================// Thread Methods// ============================================================================OSStatus AudioDeviceMac::deviceIOProc(AudioDeviceID device, const AudioTimeStamp*,const AudioBufferList* inputData,const AudioTimeStamp* inputTime,AudioBufferList* outputData,const AudioTimeStamp* outputTime,void *clientData){#ifdef DEVICE_THREAD_EXCEPTION// WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,// " deviceIOProc device = %d,stopped = %d",device,RunSpeakerInfo.Stopped);if (device == RunSpeakerInfo.DeviceID && RunSpeakerInfo.Stopped){RunSpeakerInfo.errorCount++;if(RunSpeakerInfo.errorCount >= 50 && AudioDeviceCreateIOProcID != NULL ){AudioDeviceStop(RunSpeakerInfo.DeviceID, RunSpeakerInfo.DeviceIOProcID);AudioDeviceDestroyIOProcID(RunSpeakerInfo.DeviceID, RunSpeakerInfo.DeviceIOProcID);}if (RunSpeakerInfo.errorCount >= 150){//force crashint* ptr = (int*)(RunSpeakerInfo.errorCount);*ptr = 0;}return -1;}if (device != RunSpeakerInfo.DeviceID){RunSpeakerInfo.errorCount++;if (RunSpeakerInfo.errorCount >= 1024){//force crashint* ptr = (int*)(RunSpeakerInfo.errorCount);*ptr = 0;}return -1;}#endifAudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;assert(ptrThis != NULL);ptrThis->implDeviceIOProc(device,inputData, inputTime, outputData, outputTime);// AudioDeviceIOProc functions are supposed to return 0return 0;}OSStatus AudioDeviceMac::outConverterProc(AudioConverterRef,UInt32 *numberDataPackets,AudioBufferList *data,AudioStreamPacketDescription **,void *userData){AudioDeviceMac *ptrThis = (AudioDeviceMac *) userData;assert(ptrThis != NULL);return ptrThis->implOutConverterProc(numberDataPackets, data);}OSStatus AudioDeviceMac::inDeviceIOProc(AudioDeviceID device, const AudioTimeStamp*,const AudioBufferList* inputData,const AudioTimeStamp* inputTime,AudioBufferList*,const AudioTimeStamp*, void* clientData){#ifdef DEVICE_THREAD_EXCEPTION// WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,// " inDeviceIOProc device = %d,stopped = %d",device,RunMicrophoneInfo.Stopped);if (device == RunMicrophoneInfo.DeviceID && RunMicrophoneInfo.Stopped){RunMicrophoneInfo.errorCount++;if ( RunMicrophoneInfo.errorCount >= 50 && AudioDeviceCreateIOProcID != NULL ){AudioDeviceStop(RunMicrophoneInfo.DeviceID, RunMicrophoneInfo.DeviceIOProcID);AudioDeviceDestroyIOProcID(RunMicrophoneInfo.DeviceID, RunMicrophoneInfo.DeviceIOProcID);}if (RunMicrophoneInfo.errorCount >= 151){//force crashint* ptr = (int *)RunMicrophoneInfo.errorCount;*ptr = 0;}return 0;}if (device != RunMicrophoneInfo.DeviceID){RunMicrophoneInfo.errorCount++;if (RunMicrophoneInfo.errorCount >= 1025){//force crashint* ptr = (int *)RunMicrophoneInfo.errorCount;*ptr = 0;}return 0;}#endifAudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;assert(ptrThis != NULL);ptrThis->implInDeviceIOProc(device,inputData, inputTime);// AudioDeviceIOProc functions are supposed to return 0return 0;}OSStatus AudioDeviceMac::inConverterProc(AudioConverterRef,UInt32 *numberDataPackets,AudioBufferList *data,AudioStreamPacketDescription ** /*dataPacketDescription*/,void *userData){AudioDeviceMac *ptrThis = static_cast<AudioDeviceMac*> (userData);assert(ptrThis != NULL);return ptrThis->implInConverterProc(numberDataPackets, data);}OSStatus AudioDeviceMac::implDeviceIOProc(AudioDeviceID device,const AudioBufferList *inputData,const AudioTimeStamp *inputTime,AudioBufferList *outputData,const AudioTimeStamp *outputTime){OSStatus err = noErr;UInt64 outputTimeNs = AudioConvertHostTimeToNanos(outputTime->mHostTime);UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());/*if (device == _inputDeviceID){implInDeviceIOProc(device,inputData, inputTime);}*/if (device != _outputDeviceID){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," Error device id not match device = %d,_outputid = %d",device,_outputDeviceID);return 0;}_playCallbackHappened = true;// Check if we should close down audio device// Double-checked locking optimization to remove locking overheadif (_doStop){_critSect.Enter();if (_doStop){{// In the case of a shared device, the single driving ioProc// is stopped hereif (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID, _deviceIOProcID));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," implDeviceIOProc AudioDeviceStop _inputDeviceID = %d _outputDeviceID = %d,_deviceIOProcID = %d.",_inputDeviceID,_outputDeviceID,_deviceIOProcID);}else{WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID,deviceIOProc));}if (AudioDeviceDestroyIOProcID != NULL){WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,_deviceIOProcID));}else{if (err == noErr){WEBRTC_CA_LOG_WARN(AudioDeviceRemoveIOProc(_outputDeviceID, deviceIOProc));}}if (err == noErr){WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,_id, " Playout or shared device stopped");}}if (err == noErr){_doStop = false;_stopEvent.Set();}_critSect.Leave();return 0;}_critSect.Leave();}if (!_playing){// This can be the case when a shared device is capturing but not// rendering. We allow the checks above before returning to avoid a// timeout when capturing is stopped.return 0;}_critSectPlayFormatChange.Enter();assert(_outStreamFormat.mBytesPerFrame != 0);UInt32 size = outputData->mBuffers->mDataByteSize/ _outStreamFormat.mBytesPerFrame;if (outputData->mBuffers->mNumberChannels != _outStreamFormat.mChannelsPerFrame){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," implDeviceIOProc outputData mNumberBuffers = %d, channelnum = %d, dataByteSize = %d, framenum = %d",outputData->mNumberBuffers, outputData->mBuffers->mNumberChannels, outputData->mBuffers->mDataByteSize, size);}outputData->mNumberBuffers = 1;// TODO(xians): signal an error somehow?err = AudioConverterFillComplexBuffer(_renderConverter, outConverterProc,this, &size, outputData, NULL);if (size == 0 || err != noErr){_playConvertFailCount += 1;}else{_playConvertFailCount = 0;}if (_playConvertFailCount >= MAXCONVERTFAILEDCOUNT) {AudioConverterDispose(_renderConverter);AudioConverterNew(&_outDesiredFormat, &_outStreamFormat,&_renderConverter);_critSectPlayFormatChange.Leave();_playConvertFailCount = 0;return 0;}if (err != noErr){_critSectPlayFormatChange.Leave();if (err == 1){// This is our own error.WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Error in AudioConverterFillComplexBuffer()");return 1;} else{WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Error in AudioConverterFillComplexBuffer() errorcode = %d", err);return 0;}}ring_buffer_size_t bufSizeSamples =PaUtil_GetRingBufferReadAvailable(_paRenderBuffer);int32_t renderDelayUs = static_cast<int32_t> (1e-3 * (outputTimeNs - nowNs)+ 0.5);renderDelayUs += static_cast<int32_t> ((1.0e6 * bufSizeSamples)/ _outDesiredFormat.mChannelsPerFrame / _outDesiredFormat.mSampleRate+ 0.5);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," implDeviceIOProc report delay nowNS = %llu, outputTimeNs = %llu bufSizeSamples = %d,renderDelayUs = %d",nowNs,outputTimeNs, bufSizeSamples,renderDelayUs);#endifAtomicSet32(&_renderDelayUs, renderDelayUs);_critSectPlayFormatChange.Leave();return 0;}OSStatus AudioDeviceMac::implOutConverterProc(UInt32 *numberDataPackets,AudioBufferList *data){assert(data->mNumberBuffers == 1);ring_buffer_size_t numSamples = *numberDataPackets* _outDesiredFormat.mChannelsPerFrame;data->mBuffers->mNumberChannels = _outDesiredFormat.mChannelsPerFrame;// Always give the converter as much as it wants, zero padding as required.data->mBuffers->mDataByteSize = *numberDataPackets* _outDesiredFormat.mBytesPerPacket;data->mBuffers->mData = _renderConvertData;memset(_renderConvertData, 0, sizeof(_renderConvertData));PaUtil_ReadRingBuffer(_paRenderBuffer, _renderConvertData, numSamples);kern_return_t kernErr = semaphore_signal_all(_renderSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," semaphore_signal_all() error: %d", kernErr);return 1;}return 0;}OSStatus AudioDeviceMac::implInDeviceIOProc(AudioDeviceID device,const AudioBufferList *inputData,const AudioTimeStamp *inputTime){OSStatus err = noErr;UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());if (device != _inputDeviceID){return 0;}_InputTimeNs = inputTimeNs;_NowTimeNs = nowNs;_recordCallbackHappened = true;// Check if we should close down audio device// Double-checked locking optimization to remove locking overheadif (_doStopRec){_critSect.Enter();if (_doStopRec){{// In the case of a shared device, the single driving ioProc// is stopped hereif (AudioDeviceCreateIOProcID != NULL ){WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," implInDeviceIOProc AudioDeviceStop _inputDeviceID = %d _outputDeviceID = %d _recSameDevice = %d _deviceIOProcID = %d,_inDeviceIOProcID = %d.",_inputDeviceID,_outputDeviceID,_recSameDevice,_deviceIOProcID,_inDeviceIOProcID);}else{WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID,inDeviceIOProc));}if (AudioDeviceDestroyIOProcID != NULL){WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_inputDeviceID, _inDeviceIOProcID));}else{if (err == noErr){WEBRTC_CA_LOG_WARN(AudioDeviceRemoveIOProc(_inputDeviceID, inDeviceIOProc));}}if (err == noErr){WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,_id, " Recording device stopped");}}if (err == noErr){_doStopRec = false;_stopEventRec.Set();}_critSect.Leave();return 0;}_critSect.Leave();}if (!_recording){return 0;}if (device != _inputDeviceID){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," Error device id not match device = %d,_inputDeviceID = %d",device,_inputDeviceID);return 0;}ring_buffer_size_t bufSizeSamples =PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer);int32_t captureDelayUs = static_cast<int32_t> (1e-3 * (nowNs - inputTimeNs)+ 0.5);captureDelayUs+= static_cast<int32_t> ((1.0e6 * bufSizeSamples)/ _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mSampleRate+ 0.5);_InputTimeNs -= static_cast<UInt64>((1.0e9 * bufSizeSamples)/ _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mSampleRate+ 0.5);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," implInDeviceIOProc report delay nowNS = %llu, inputTimeNs = %llu bufSizeSamples = %d,captureDelayUs = %d",nowNs * 1e-6,inputTimeNs * 1e-6, bufSizeSamples,captureDelayUs);#endifAtomicSet32(&_captureDelayUs, captureDelayUs);AtomicSet32(&_captureDelayUsUpdate, 1);assert(inputData->mNumberBuffers == 1);ring_buffer_size_t numSamples = inputData->mBuffers->mDataByteSize* _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mBytesPerPacket;PaUtil_WriteRingBuffer(_paCaptureBuffer, inputData->mBuffers->mData,numSamples);kern_return_t kernErr = semaphore_signal_all(_captureSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," semaphore_signal_all() error: %d", kernErr);}return err;}OSStatus AudioDeviceMac::implInConverterProc(UInt32 *numberDataPackets,AudioBufferList *data){assert(data->mNumberBuffers == 1);ring_buffer_size_t numSamples = *numberDataPackets * _inStreamFormat.mChannelsPerFrame;#ifdef MUTI_MICROPHONE_SUPPORTif (_bMutilChannelsMic){if (_tmpRecordBufferSize < numSamples){if (_pTmpRecordBuffer){delete [] _pTmpRecordBuffer;_pTmpRecordBuffer = NULL;}_tmpRecordBufferSize = numSamples;}if (_pTmpRecordBuffer == NULL){_pTmpRecordBuffer = new Float32[numSamples];}if (numSamples == PaUtil_ReadRingBuffer(_paCaptureBuffer, (void*)(_pTmpRecordBuffer), numSamples)){data->mBuffers->mData = (void*)(_pTmpRecordBuffer);}data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;*numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;data->mBuffers->mDataByteSize = *numberDataPackets * _inStreamFormat.mBytesPerPacket;}else{#endif// Pass the read pointer directly to the converter to avoid a memcpy.void* dummyPtr;ring_buffer_size_t dummySize;PaUtil_GetRingBufferReadRegions(_paCaptureBuffer, numSamples,&data->mBuffers->mData, &numSamples,&dummyPtr, &dummySize);PaUtil_AdvanceRingBufferReadIndex(_paCaptureBuffer, numSamples);data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;*numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;data->mBuffers->mDataByteSize = *numberDataPackets* _inStreamFormat.mBytesPerPacket;#ifdef MUTI_MICROPHONE_SUPPORT}#endifreturn 0;}bool AudioDeviceMac::RunRender(void* ptrThis){return static_cast<AudioDeviceMac*> (ptrThis)->RenderWorkerThread();}bool AudioDeviceMac::RenderWorkerThread(){ring_buffer_size_t numSamples = ENGINE_PLAY_BUF_SIZE_IN_SAMPLES* _outDesiredFormat.mChannelsPerFrame;while (PaUtil_GetRingBufferWriteAvailable(_paRenderBuffer)- _renderDelayOffsetSamples < numSamples){mach_timespec_t timeout;timeout.tv_sec = 0;timeout.tv_nsec = TIMER_PERIOD_MS;kern_return_t kernErr = semaphore_timedwait(_renderSemaphore, timeout);if (kernErr == KERN_OPERATION_TIMED_OUT){int32_t signal = AtomicGet32(&_renderDeviceIsAlive);if (signal == 0){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," RenderWorkerThread waiting for playout timeout error: %d, exit thread!!!!", kernErr);// The render device is no longer alive; stop the worker thread.return false;}WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," RenderWorkerThread waiting for playout timeout error: %d", kernErr);if (_playing && _need_detect_play){_playWaitErrorCount++;if (_playWaitErrorCount >= N_REC_WAIT_ERROR_COUNT){_playError = DEVICE_ERROR_PLAYBACK_NO_CALLBACK;_playWaitErrorCount = 0;return true;}}return true;} else if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," semaphore_timedwait() error: %d", kernErr);if (_playing){_playWaitErrorCount++;if (_playWaitErrorCount >= N_REC_WAIT_ERROR_COUNT){_playWaitErrorCount = 0;return true;}}}else{_need_detect_play = false;_playWaitErrorCount = 0;}}if (_enableSpkVolumeCheck){if (_spkVolumeCheckFreq == 0){WebRtc_Word32 refineSpeakerVolumeDB = 0;if( 0 != SpeakerVolumeWithDB(_speakerVolumeDB)){_speakerVolumeDB = 0;}if (_speakerVolumeDB > (_optVolDB + 4)){refineSpeakerVolumeDB = (3*(_speakerVolumeDB - _optVolDB + 1))/2;}else if (_speakerVolumeDB > _optVolDB){refineSpeakerVolumeDB = _speakerVolumeDB - _optVolDB;}else{refineSpeakerVolumeDB = 0;}// _ptrAudioBuffer->SetMixerTargetLevelDB(_outputTargetLevelDB - refineSpeakerVolumeDB);}_spkVolumeCheckFreq++;if (_spkVolumeCheckFreq > 20){_spkVolumeCheckFreq = 0;}}WebRtc_Word8 playBuffer[4 * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES];if (!_ptrAudioBuffer){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," capture AudioBuffer is invalid");return false;}// Ask for new PCM data to be played out using the AudioDeviceBuffer.WebRtc_UWord32 nSamples =_ptrAudioBuffer->RequestPlayoutData(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES);nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);if (nSamples != ENGINE_PLAY_BUF_SIZE_IN_SAMPLES){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," invalid number of output samples(%d)", nSamples);}WebRtc_UWord32 nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame;SInt16 *pPlayBuffer = (SInt16 *) &playBuffer;{CriticalSectionScoped lock(_zoomDeviceBufferCritSect);if (_loopbackRecording && _loopbackLocalSpeakerPlay){/*WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"RenderWorkerThread _loopbackCaptureBufDataReadIndex(%d),nOutSamples(%d)", _loopbackCaptureBufDataReadIndex,nOutSamples);*/if (_loopbackCaptureAvailbaleBufData != 0) {WebRtc_Word32 temp(0);// fwrite(_ploopbackCaptureBufData[_loopbackCaptureBufDataReadIndex], 2, nOutSamples, _fPCMFile);for (int i = 0; i < nOutSamples; i++){if (_outDesiredFormat.mChannelsPerFrame == 2){temp = pPlayBuffer[i] + _ploopbackCaptureBufData[_loopbackCaptureBufDataReadIndex][i];}else if(_outDesiredFormat.mChannelsPerFrame == 1){temp = pPlayBuffer[i] + _ploopbackCaptureBufData[_loopbackCaptureBufDataReadIndex][i * 2];}if (temp > 32767){pPlayBuffer[i] = 32767;}else if(temp < -32768){pPlayBuffer[i] = -32768;}else{pPlayBuffer[i] = (WebRtc_Word16)temp;}}_loopbackCaptureBufDataReadIndex++;if (_loopbackCaptureBufDataReadIndex == MAXLOOPBACKFRAMEBUFNUM){_loopbackCaptureBufDataReadIndex = 0;}_loopbackCaptureAvailbaleBufData--;}}}if (_bAudioShareStatus){_ptrAudioBuffer->SetLoopbackRenderBuffer((const WebRtc_Word8*)pPlayBuffer,nSamples);_ptrAudioBuffer->DeliverLoopbackRenderData();}if (_macBookProPanRight && (_playChannels == 2)){// Mix entirely into the right channel and zero the left channel.SInt32 sampleInt32 = 0;for (WebRtc_UWord32 sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx+= 2){sampleInt32 = pPlayBuffer[sampleIdx];sampleInt32 += pPlayBuffer[sampleIdx + 1];sampleInt32 /= 2;if (sampleInt32 > 32767){sampleInt32 = 32767;} else if (sampleInt32 < -32768){sampleInt32 = -32768;}pPlayBuffer[sampleIdx] = 0;pPlayBuffer[sampleIdx + 1] = static_cast<SInt16> (sampleInt32);}}PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples);return true;}bool AudioDeviceMac::RunCapture(void* ptrThis){return static_cast<AudioDeviceMac*> (ptrThis)->CaptureWorkerThread();}int32_t AudioDeviceMac::RegisterAudioDeviceNotify(IAudioDeviceChangeNotify *pNotify){_pDeviceChangeNotify = pNotify;#ifdef MUTI_MICROPHONE_SUPPORTif (_pdeviceNotifier){_pdeviceNotifier->Init(_pDeviceChangeNotify);}#else_deviceNotifier.Init(_pDeviceChangeNotify);#endifreturn 0;}int32_t AudioDeviceMac::RegisterAudioVolumeNotify(IAudioVolumeChangeNotify *pNotify){CriticalSectionScoped lock(_critSectNotify);_pVolumeChangeNotify = pNotify;return 0;}WebRtc_Word32 AudioDeviceMac::MicrophoneSelect(bool& bselected){bselected = true;return 0;}bool AudioDeviceMac::CaptureWorkerThread(){OSStatus err = noErr;UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;ring_buffer_size_t numSamples = size * _inStreamFormat.mChannelsPerFrame;if (_paCaptureBuffer == NULL){return false;}while (PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer) < numSamples){mach_timespec_t timeout;timeout.tv_sec = 0;timeout.tv_nsec = TIMER_PERIOD_MS;kern_return_t kernErr = semaphore_timedwait(_captureSemaphore, timeout);if (kernErr == KERN_OPERATION_TIMED_OUT){int32_t signal = AtomicGet32(&_captureDeviceIsAlive);if (signal == 0){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," CaptureWorkerThread waiting for record timeout error: %d, exit thread!!!!", kernErr);return false;}WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," CaptureWorkerThread waiting for record timeout error: %d", kernErr);if (_recording && _need_detect){_recWaitErrorCount++;if (_recWaitErrorCount >= N_REC_WAIT_ERROR_COUNT){_recError = DEVICE_ERROR_RECORD_NO_CALLBACK;_recWaitErrorCount = 0;return true;}}return true;} else if (kernErr != KERN_SUCCESS){if (_recording){_recWaitErrorCount++;if (_recWaitErrorCount >= N_REC_WAIT_ERROR_COUNT){_recWaitErrorCount = 0;return true;}}WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," semaphore_wait() error: %d", kernErr);}else{_need_detect = false;_recWaitErrorCount = 0;}}// WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, " CaptureWorkerThread() aaa ");if (_critSectFormatChange){_critSectFormatChange->Enter();}UInt32 noRecSamples = ENGINE_REC_BUF_SIZE_IN_SAMPLES* _inDesiredFormat.mChannelsPerFrame;SInt16 recordBuffer[noRecSamples*4];AudioBufferList engineBuffer;engineBuffer.mNumberBuffers = 1; // Interleaved channels.engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame;engineBuffer.mBuffers->mDataByteSize = noRecSamples*4*sizeof(SInt16);engineBuffer.mBuffers->mData = recordBuffer;err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc,this, &size, &engineBuffer, NULL);if (size != ENGINE_REC_BUF_SIZE_IN_SAMPLES){_capConvertFailCount += 1;}else{_capConvertFailCount = 0;}if (_capConvertFailCount >= MAXCONVERTFAILEDCOUNT) {WEBRTC_CA_LOG_ERR(AudioConverterDispose(_captureConverter));WEBRTC_CA_LOG_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,&_captureConverter));_capConvertFailCount = 0;}if (_critSectFormatChange){_critSectFormatChange->Leave();}if (err != noErr){if (err == 1){// This is our own error.return false;} else{logCAMsg(kTraceError, kTraceAudioDevice, _id,"Error in AudioConverterFillComplexBuffer()",(const char *) &err);return false;}}if((_inDesiredFormat.mChannelsPerFrame > 2) && size == ENGINE_REC_BUF_SIZE_IN_SAMPLES && _recChannels == 1){WebRtc_Word16* audio16ptr = (WebRtc_Word16*) recordBuffer;for (WebRtc_UWord32 i = 0; i < ENGINE_REC_BUF_SIZE_IN_SAMPLES; i++){WebRtc_Word32 audio32 = 0;for (WebRtc_UWord32 j = 0; j < _inDesiredFormat.mChannelsPerFrame; j++){audio32 += audio16ptr[(_inDesiredFormat.mChannelsPerFrame)*i + j];}if (audio32 > 32767){audio32 = 32767;}else if(audio32 < -32768){audio32 = -32768;}recordBuffer[i] = static_cast<WebRtc_Word16> (audio32);}}// TODO(xians): what if the returned size is incorrect?if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES){WebRtc_UWord32 currentMicLevel(0);WebRtc_UWord32 newMicLevel(0);WebRtc_Word32 msecOnPlaySide;WebRtc_Word32 msecOnRecordSide;int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);#ifdef CHECKTIMESTAMPERRORif (_bCheckTimestampError){if (captureDelayUs < 0 || renderDelayUs < 0 || (1e-3 * captureDelayUs) > 5000 || (1e-3 * renderDelayUs) > 5000){_timestampErrorCount++;}else{_timestampErrorCount = 0;}if (_timestampErrorCount > 3000){_recError = DEVICE_ERROR_TIMESTAMP_EXCEPTION;// resetCoreAudioService();_bCheckTimestampError = false;}}#endifint32_t captureDelayUsUpdate = AtomicGet32(&_captureDelayUsUpdate);//Add by Sipingif (captureDelayUsUpdate == 1){AtomicSet32(&_captureDelayUsUpdate, 0);_captureDelayUsPrevious = captureDelayUs;if (_InputTimeNs < _NowTimeNs){_recDataInputTimeNs = _InputTimeNs;}else{_recDataInputTimeNs += 10e6;}}else{_captureDelayUsPrevious = _captureDelayUsPrevious - 10000;if (_captureDelayUsPrevious < 0)_captureDelayUsPrevious = 0;captureDelayUs = _captureDelayUsPrevious;_recDataInputTimeNs += 10e6;}_msecOnPlaySide = msecOnPlaySide = static_cast<WebRtc_Word32> (1e-3 * (renderDelayUs+ _renderLatencyUs) + 0.5);_msecOnRecordSide = msecOnRecordSide = static_cast<WebRtc_Word32> (1e-3 * (captureDelayUs+ _captureLatencyUs) + 0.5);#ifdef TRACKDEVICEDELAYWEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," CaptureWorkerThread report delay msecOnPlaySide = %d,msecOnRecordSide = %d",msecOnPlaySide,msecOnRecordSide);#endifif (!_ptrAudioBuffer){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," capture AudioBuffer is invalid");return false;}// store the recorded buffer (no action will be taken if the// #recorded samples is not a full buffer)_ptrAudioBuffer->SetRecordedBuffer((WebRtc_Word8*) &recordBuffer,(WebRtc_UWord32) size);if (AGC()){// store current mic level in the audio buffer if AGC is enabledif (MicrophoneVolume(currentMicLevel) == 0){// this call does not affect the actual microphone volume_ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);}}_ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, 0);// deliver recorded samples at specified sample rate, mic level etc.// to the observer using callback_ptrAudioBuffer->DeliverRecordedData();if (AGC()){newMicLevel = _ptrAudioBuffer->NewMicLevel();if (newMicLevel != 0){// The VQE will only deliver non-zero microphone levels when// a change is needed.// Set this new mic level (received from the observer as return// value in the callback).WEBRTC_TRACE(kTraceStream, kTraceAudioDevice,_id, " AGC change of volume: old=%u => new=%u",currentMicLevel, newMicLevel);if (SetMicrophoneVolume(newMicLevel) == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," the required modification of the microphone ""volume failed");}}}}return true;}WebRtc_Word32 AudioDeviceMac::SetLoopbackRecordDevice(WebRtc_UWord16 index){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"AudioDeviceMac::SetLoopbackRecordDevice(index=%u)", index);if (_loopbackDeviceIsInitialized){return -1;}AudioDeviceID playDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_UWord32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,playDevices, MaxNumberDevices,ZoomAudioDeviceNum);WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"SetLoopbackRecordDevice number of availiable waveform-audio output devices is %u",nDevices);if (index > (nDevices - 1)){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"SetLoopbackRecordDevice device index is out of range [0,%u]", (nDevices - 1));return -1;}_loopbackDeviceIndex = index;_loopbackDeviceIsSpecified = true;return 0;}WebRtc_Word32 AudioDeviceMac::SetLoopbackRecordDevice(AudioDeviceModule::WindowsDeviceType device){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"SetLoopbackRecordDevice WindowsDeviceType not supported");if(0 == SetLoopbackRecordDevice(0)){return 0;}else{return -1;}return 0;}int32_t AudioDeviceMac::on_outer_audio_data(const char* audioSamples,const uint32_t nSamples,const uint8_t nBytesPerSample,const uint8_t nChannels,const uint32_t samplesPerSec){#ifdef BUILD_FOR_MIMOCriticalSectionScoped lock(_loopbackCritSect);if (_LoopBackDeviceSource != kExtraSource){return 0;}if (_loopbackRecording){ring_buffer_size_t bufSizeSamples =PaUtil_GetRingBufferReadAvailable(_paLoopbackCaptureBuffer);ring_buffer_size_t numSamples =nSamples/nBytesPerSample;PaUtil_WriteRingBuffer(_paLoopbackCaptureBuffer, audioSamples, numSamples);kern_return_t kernErr = semaphore_signal_all(_loopbackCaptureSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"on_outer_audio_data semaphore_signal_all() error: %d", kernErr);}}return 0;#elsereturn 0;#endif}int32_t AudioDeviceMac::SetLoopbackDeviceSource(const LoopbackDeviceSourceType DeviceSource){#ifdef BUILD_FOR_MIMOCriticalSectionScoped lock(_loopbackCritSect);_LoopBackDeviceSource = DeviceSource;#endifreturn 0;}#ifdef BUILD_FOR_MIMOWebRtc_Word32 AudioDeviceMac::InitLoopbackDeviceBM(){return 0;}WebRtc_Word32 AudioDeviceMac::InitLoopbackRecordingBM(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_loopbackCritSect);if (_loopbackRecording){return -1;}if (_loopbackRecIsInitialized){return 0;}// Initialize the loopback (devices might have been added or removed)if (InitLoopbackDevice() == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," InitLoopbackDevice() failed");}_doStopLoopbackRec = false;if (_ptrAudioBuffer){// Update audio buffer with the selected parameters_ptrAudioBuffer->SetLoopbackRecSampleRate(N_REC_SAMPLES_PER_SEC);_ptrAudioBuffer->SetLoopbackRecChannels((WebRtc_UWord8) _loopbackRecChannels);_ptrAudioBuffer->SetLoopbackRenderSampleRate(N_PLAY_SAMPLES_PER_SEC);_ptrAudioBuffer->SetLoopbackRenderChannels((WebRtc_UWord8)_playChannels);}_loopbackCaptureBufDataBM = NULL;_loopbackRecIsInitialized = true;return 0;}WebRtc_Word32 AudioDeviceMac::StartLoopbackRecordingBM(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_loopbackCritSect);if (!_loopbackRecIsInitialized){return -1;}if (_loopbackRecording){return 0;}if (!_initialized){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," loopback Recording worker thread has not been started");return -1;}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," StartLoopbackRecordingBM _loopbackCaptureBufDataBM = %d,",_loopbackCaptureBufDataBM);if (_loopbackCaptureBufDataBM == NULL){UInt32 powerOfTwo = 1;while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES){powerOfTwo <<= 1;}_loopbackCaptureBufSizeSamples = powerOfTwo;_loopbackCaptureBufDataBM = new int16_t[_loopbackCaptureBufSizeSamples];}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," StartLoopbackRecordingBM _loopbackCaptureBufSizeSamples = %d,",_loopbackCaptureBufSizeSamples);if (_paLoopbackCaptureBuffer == NULL){_paLoopbackCaptureBuffer = new PaUtilRingBuffer;ring_buffer_size_t bufSize = -1;bufSize = PaUtil_InitializeRingBuffer(_paLoopbackCaptureBuffer,sizeof(int16_t),_loopbackCaptureBufSizeSamples,_loopbackCaptureBufDataBM);if (bufSize == -1){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, "StartLoopbackRecording PaUtil_InitializeRingBuffer() error");return -1;}}PaUtil_FlushRingBuffer(_paLoopbackCaptureBuffer);_loopbackCaptureBufDataReadIndex = 0;_loopbackCaptureBufDataWriteIndex = 0;_loopbackCaptureAvailbaleBufData = 0;if (_loopbackCaptureWorkerThread == NULL){_loopbackCaptureWorkerThread= ThreadWrapper::CreateThread(RunLoopbackCapture, this, kRealtimePriority,"LoopbackCaptureWorkerThread");if (_loopbackCaptureWorkerThread == NULL){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, " loopback Capture CreateThread() error");return -1;}}kern_return_t kernErr = KERN_SUCCESS;kernErr = semaphore_create(mach_task_self(), &_loopbackCaptureSemaphore,SYNC_POLICY_FIFO, 0);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id," loopback semaphore_create() error: %d", kernErr);return -1;}OSStatus err = noErr;unsigned int threadID(0);if (_loopbackCaptureWorkerThread != NULL){_loopbackCaptureWorkerThread->Start(threadID);}_loopbackCaptureWorkerThreadId = threadID;_loopbackRecording = true;if (_ptrAudioBuffer){if (_ptrAudioBuffer->SetAudioShareStatus(true) == 0){_bAudioShareStatus = true;}}if (_playing){bool curSpeakerMuteStatus = false;SpeakerMute(curSpeakerMuteStatus);if (curSpeakerMuteStatus){SetSpeakerMute(false);}/*WebRtc_UWord32 curSpeakerVolume = 0;SpeakerVolume(curSpeakerVolume);if (curSpeakerVolume < 100){SetSpeakerVolume(100);}*/}/*char name[128];sprintf(name, "/Users/matt/Library/Logs/zoom.us/audio/loopbackrecord%p.pcm",this);_fPCMFile = fopen(name, "wb");sprintf(name, "/Users/matt/Library/Logs/zoom.us/audio/loopbackrecordorg%p.pcm",this);_fPCMFileOrg = fopen(name, "wb");*/return 0;}WebRtc_Word32 AudioDeviceMac::StopLoopbackRecordingBM(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"%s", __FUNCTION__);CriticalSectionScoped lock(_loopbackCritSect);if (!_loopbackRecIsInitialized){return 0;}_loopbackCritSect.Leave();if (_loopbackCaptureWorkerThread != NULL){if (!_loopbackCaptureWorkerThread->Stop()){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Timed out waiting for the loopback caputre worker thread to ""stop.");}}_loopbackCritSect.Enter();if (_loopbackCaptureBufDataBM){delete[] _loopbackCaptureBufDataBM;_loopbackCaptureBufDataBM = NULL;}if (_paLoopbackCaptureBuffer){delete _paLoopbackCaptureBuffer;_paLoopbackCaptureBuffer = NULL;}kern_return_t kernErr = KERN_SUCCESS;kernErr = semaphore_destroy(mach_task_self(), _loopbackCaptureSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," StopLoopbackRecording semaphore_destroy() error: %d", kernErr);}if (_loopbackCaptureWorkerThread){delete _loopbackCaptureWorkerThread;_loopbackCaptureWorkerThread = NULL;}_loopbackRecIsInitialized = false;_loopbackRecording = false;if (_ptrAudioBuffer){if (_ptrAudioBuffer->SetAudioShareStatus(false) == 0){_bAudioShareStatus = false;}}/*fclose(_fPCMFile);fclose(_fPCMFileOrg);*/return 0;}#endifWebRtc_Word32 AudioDeviceMac::InitLoopbackDevice(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id,"%s", __FUNCTION__);#ifdef BUILD_FOR_MIMOif (_LoopBackDeviceSource == kExtraSource){return InitLoopbackDeviceBM();}#endifCriticalSectionScoped lock(_loopbackCritSect);CheckAndReplaceZoomDevice();if (!ZoomAudioDeviceCheck()){return -1;}if (_loopbackRecording){return -1;}return 0;}WebRtc_Word32 AudioDeviceMac::InitLoopbackRecording(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"%s", __FUNCTION__);#ifdef BUILD_FOR_MIMOif (_LoopBackDeviceSource == kExtraSource){return InitLoopbackRecordingBM();}#endifCriticalSectionScoped lock(_loopbackCritSect);if (_loopbackRecording){return -1;}if (_loopbackRecIsInitialized){return 0;}// Initialize the loopback (devices might have been added or removed)if (InitLoopbackDevice() == -1){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," InitLoopbackDevice() failed");}if (!GetSystemDefaultPlayDevice(_SystemDefaultSpeakerID)){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," InitLoopbackDevice() _SystemDefaultSpeakerID = %d",_SystemDefaultSpeakerID);return -1;}setZoomAudioDeviceProperty(true);if (!SetSystemDefaultPlayDevice(_zoomDeviceSpeakerID)){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," InitLoopbackDevice() _zoomDeviceSpeakerID = %d",_zoomDeviceSpeakerID);SetSystemDefaultPlayDevice(_SystemDefaultSpeakerID);_SystemDefaultSpeakerID = kAudioObjectUnknown;setZoomAudioDeviceProperty(false);return -1;}AudioDeviceID nowDefaultPlayDevice = kAudioDeviceUnknown;if (!GetSystemDefaultPlayDevice(nowDefaultPlayDevice)){SetSystemDefaultPlayDevice(_SystemDefaultSpeakerID);_SystemDefaultSpeakerID = kAudioObjectUnknown;setZoomAudioDeviceProperty(false);return -1;}if(_zoomDeviceSpeakerID != nowDefaultPlayDevice){SetSystemDefaultPlayDevice(_SystemDefaultSpeakerID);_SystemDefaultSpeakerID = kAudioObjectUnknown;setZoomAudioDeviceProperty(false);return -1;}if(_zoomDeviceMicID != kAudioObjectUnknown){_mixerManager.SetMicrophoneMute(false,_zoomDeviceMicID,2);_mixerManager.SetMicrophoneVolume(255,_zoomDeviceMicID,2,true);}if(_zoomDeviceSpeakerID != kAudioObjectUnknown){_mixerManager.SpeakerMute(_bDefaultSpeakerIsMuted,_SystemDefaultSpeakerID);_mixerManager.SetSpeakerVolume(255,true,_zoomDeviceSpeakerID,2,true);}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," InitLoopbackDevice() _zoomDeviceSpeakerID = %d,_SystemDefaultSpeakerID = %d",_zoomDeviceSpeakerID,_SystemDefaultSpeakerID);OSStatus err = noErr;UInt32 size = 0;_loopbackCaptureDeviceIsAlive = 1;_doStopLoopbackRec = false;// Get current stream descriptionAudioObjectPropertyAddresspropertyAddress = { kAudioDevicePropertyStreamFormat,kAudioDevicePropertyScopeInput, 0 };memset(&_loopbackStreamFormat, 0, sizeof(_loopbackStreamFormat));size = sizeof(_loopbackStreamFormat);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_zoomDeviceMicID,&propertyAddress, 0, NULL, &size, &_loopbackStreamFormat));if (_loopbackStreamFormat.mFormatID != kAudioFormatLinearPCM){logCAMsg(kTraceError, kTraceAudioDevice, _id,"Unacceptable _loopbackStream stream format -> mFormatID",(const char *) &_loopbackStreamFormat.mFormatID);return -1;}if (_loopbackStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,", Too many loopback channels on device (mChannelsPerFrame = %d)",_loopbackStreamFormat.mChannelsPerFrame);return -1;}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"InitLoopbackDevice loopback stream format:");WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"InitLoopbackDevice mSampleRate = %f, mChannelsPerFrame = %u",_loopbackStreamFormat.mSampleRate, _loopbackStreamFormat.mChannelsPerFrame);WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"InitLoopbackDevice mBytesPerPacket = %u, mFramesPerPacket = %u",_loopbackStreamFormat.mBytesPerPacket,_loopbackStreamFormat.mFramesPerPacket);WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"InitLoopbackDevice mBytesPerFrame = %u, mBitsPerChannel = %u",_loopbackStreamFormat.mBytesPerFrame,_loopbackStreamFormat.mBitsPerChannel);WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"InitLoopbackDevice mFormatFlags = %u, mChannelsPerFrame = %u",_loopbackStreamFormat.mFormatFlags,_loopbackStreamFormat.mChannelsPerFrame);logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",(const char *) &_loopbackStreamFormat.mFormatID);// Our preferred format to work withif (_loopbackStreamFormat.mChannelsPerFrame >= 2 && (_loopbackRecChannels == 2)){_loopbackDesiredFormat.mChannelsPerFrame = 2;} else{// Disable stereo recording when we only have one channel on the device._loopbackDesiredFormat.mChannelsPerFrame = 1;_loopbackRecChannels = 1;WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"Stereo recording unavailable on this device");}if (_ptrAudioBuffer){// Update audio buffer with the selected parameters_ptrAudioBuffer->SetLoopbackRecSampleRate(N_REC_SAMPLES_PER_SEC);_ptrAudioBuffer->SetLoopbackRecChannels((WebRtc_UWord8) _loopbackRecChannels);_ptrAudioBuffer->SetLoopbackRenderSampleRate(N_PLAY_SAMPLES_PER_SEC);_ptrAudioBuffer->SetLoopbackRenderChannels((WebRtc_UWord8)_playChannels);}_loopbackDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;_loopbackDesiredFormat.mBytesPerPacket = _loopbackDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_loopbackDesiredFormat.mFramesPerPacket = 1;_loopbackDesiredFormat.mBytesPerFrame = _loopbackDesiredFormat.mChannelsPerFrame* sizeof(SInt16);_loopbackDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;_loopbackDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger| kLinearPCMFormatFlagIsPacked;#ifdef WEBRTC_BIG_ENDIAN_loopbackDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;#endif_loopbackDesiredFormat.mFormatID = kAudioFormatLinearPCM;WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_loopbackStreamFormat, &_loopbackDesiredFormat,&_loopbackCaptureConverter));// First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)// TODO(xians): investigate this block.UInt32 bufByteCount = (UInt32)((_loopbackStreamFormat.mSampleRate / 1000.0)* 10.0 * N_BLOCKS_IO * _loopbackStreamFormat.mChannelsPerFrame* sizeof(Float32));if (_loopbackStreamFormat.mFramesPerPacket != 0){if (bufByteCount % _loopbackStreamFormat.mFramesPerPacket != 0){bufByteCount = ((UInt32)(bufByteCount/ _loopbackStreamFormat.mFramesPerPacket) + 1)* _loopbackStreamFormat.mFramesPerPacket;}}// Ensure the buffer size is within the acceptable range provided by the device.propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;AudioValueRange range;size = sizeof(range);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_zoomDeviceMicID,&propertyAddress, 0, NULL, &size, &range));if (range.mMinimum > bufByteCount){bufByteCount = range.mMinimum;} else if (range.mMaximum < bufByteCount){bufByteCount = range.mMaximum;}propertyAddress.mSelector = kAudioDevicePropertyBufferSize;size = sizeof(bufByteCount);WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_zoomDeviceMicID,&propertyAddress, 0, NULL, size, &bufByteCount));// Listen for format changes// TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_zoomDeviceMicID,&propertyAddress, &objectListenerProc, this));// Listen for processor overloadspropertyAddress.mSelector = kAudioDeviceProcessorOverload;WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_zoomDeviceMicID,&propertyAddress, &objectListenerProc, this));if (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_zoomDeviceMicID,loopbackDeviceIOProc, this, &_loopbackDeviceIOProcID));}else{WEBRTC_CA_RETURN_ON_ERR(AudioDeviceAddIOProc(_zoomDeviceMicID, loopbackDeviceIOProc,this));}_loopbackRecIsInitialized = true;return 0;}WebRtc_Word32 AudioDeviceMac::StartLoopbackRecording(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"%s", __FUNCTION__);#ifdef BUILD_FOR_MIMOif (_LoopBackDeviceSource == kExtraSource){return StartLoopbackRecordingBM();}#endifCriticalSectionScoped lock(_loopbackCritSect);if (!_loopbackRecIsInitialized){return -1;}if (_loopbackRecording){return 0;}if (!_initialized){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," loopback Recording worker thread has not been started");return -1;}if (_loopbackCaptureBufData == NULL){UInt32 powerOfTwo = 1;while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES){powerOfTwo <<= 1;}_loopbackCaptureBufSizeSamples = powerOfTwo;_loopbackCaptureBufData = new Float32[_loopbackCaptureBufSizeSamples];}if (_paLoopbackCaptureBuffer == NULL){_paLoopbackCaptureBuffer = new PaUtilRingBuffer;ring_buffer_size_t bufSize = -1;bufSize = PaUtil_InitializeRingBuffer(_paLoopbackCaptureBuffer,sizeof(Float32),_loopbackCaptureBufSizeSamples,_loopbackCaptureBufData);if (bufSize == -1){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, "StartLoopbackRecording PaUtil_InitializeRingBuffer() error");if (!SetSystemDefaultPlayDevice(_SystemDefaultSpeakerID)){CheckAndReplaceZoomDevice();}_SystemDefaultSpeakerID = kAudioObjectUnknown;return -1;}}PaUtil_FlushRingBuffer(_paLoopbackCaptureBuffer);_loopbackCaptureBufDataReadIndex = 0;_loopbackCaptureBufDataWriteIndex = 0;_loopbackCaptureAvailbaleBufData = 0;if (_loopbackCaptureWorkerThread == NULL){_loopbackCaptureWorkerThread= ThreadWrapper::CreateThread(RunLoopbackCapture, this, kRealtimePriority,"LoopbackCaptureWorkerThread");if (_loopbackCaptureWorkerThread == NULL){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,_id, " loopback Capture CreateThread() error");if (!SetSystemDefaultPlayDevice(_SystemDefaultSpeakerID)){CheckAndReplaceZoomDevice();}_SystemDefaultSpeakerID = kAudioObjectUnknown;return -1;}}kern_return_t kernErr = KERN_SUCCESS;kernErr = semaphore_create(mach_task_self(), &_loopbackCaptureSemaphore,SYNC_POLICY_FIFO, 0);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id," loopback semaphore_create() error: %d", kernErr);if (!SetSystemDefaultPlayDevice(_SystemDefaultSpeakerID)){CheckAndReplaceZoomDevice();}_SystemDefaultSpeakerID = kAudioObjectUnknown;return -1;}OSStatus err = noErr;unsigned int threadID(0);if (_loopbackCaptureWorkerThread != NULL){_loopbackCaptureWorkerThread->Start(threadID);}_loopbackCaptureWorkerThreadId = threadID;if (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_zoomDeviceMicID,_loopbackDeviceIOProcID));}else{WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_zoomDeviceMicID, loopbackDeviceIOProc));}_loopbackRecording = true;if (_ptrAudioBuffer){if (_ptrAudioBuffer->SetAudioShareStatus(true) == 0){_bAudioShareStatus = true;}}if (_playing && _loopbackLocalSpeakerPlay){bool curSpeakerMuteStatus = false;SpeakerMute(curSpeakerMuteStatus);if (curSpeakerMuteStatus){SetSpeakerMute(false);}#ifndef BUILD_FOR_MIMOWebRtc_UWord32 curSpeakerVolume = 0;SpeakerVolume(curSpeakerVolume);if (curSpeakerVolume < 100){SetSpeakerVolume(100);}#endif}_mixerManager.SetSpeakerMute(_bDefaultSpeakerIsMuted,_zoomDeviceSpeakerID,2);/*char name[128];sprintf(name, "/Users/matt/Library/Logs/zoom.us/audio/loopbackrecord%p.pcm",this);_fPCMFile = fopen(name, "wb");sprintf(name, "/Users/matt/Library/Logs/zoom.us/audio/loopbackrecordorg%p.pcm",this);_fPCMFileOrg = fopen(name, "wb");*/return 0;}WebRtc_Word32 AudioDeviceMac::StopLoopbackRecording(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"%s", __FUNCTION__);#ifdef BUILD_FOR_MIMOif (_LoopBackDeviceSource == kExtraSource){return StopLoopbackRecordingBM();}#endifCriticalSectionScoped lock(_loopbackCritSect);if (!_loopbackRecIsInitialized){return 0;}OSStatus err = noErr;// Stop deviceint32_t captureDeviceIsAlive = AtomicGet32(&_loopbackCaptureDeviceIsAlive);{if (_loopbackRecording && captureDeviceIsAlive == 1){_loopbackRecording = false;_doStopLoopbackRec = true; // Signal to io proc to stop audio device_loopbackCritSect.Leave(); // Cannot be under lock, risk of deadlockif (kEventSignaled != _stopEventLoopbackRec.Wait(WAIT_THREAD_TERMINAL)){CriticalSectionScoped critScoped(_loopbackCritSect);WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," Timed out stopping the loopback capture IOProc. ""We may have failed to detect a device removal.");if (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_LOG_ERR(AudioDeviceStop(_zoomDeviceMicID, _loopbackDeviceIOProcID));}else{WEBRTC_CA_LOG_ERR(AudioDeviceStop(_zoomDeviceMicID,loopbackDeviceIOProc));}if (AudioDeviceDestroyIOProcID != NULL){WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_zoomDeviceMicID,_loopbackDeviceIOProcID));}else{WEBRTC_CA_LOG_WARN(AudioDeviceRemoveIOProc(_zoomDeviceMicID, loopbackDeviceIOProc));}}_loopbackCritSect.Enter();_doStopLoopbackRec = false;WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id," loopback Recording stopped");}}// Setting this signal will allow the worker thread to be stopped.AtomicSet32(&_loopbackCaptureDeviceIsAlive, 0);_loopbackCritSect.Leave();if (_loopbackCaptureWorkerThread != NULL){if (!_loopbackCaptureWorkerThread->Stop()){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," Timed out waiting for the loopback caputre worker thread to ""stop.");}}_loopbackCritSect.Enter();WEBRTC_CA_LOG_WARN(AudioConverterDispose(_loopbackCaptureConverter));if (_loopbackCaptureBufData){delete[] _loopbackCaptureBufData;_loopbackCaptureBufData = NULL;}if (_paLoopbackCaptureBuffer){delete _paLoopbackCaptureBuffer;_paLoopbackCaptureBuffer = NULL;}kern_return_t kernErr = KERN_SUCCESS;kernErr = semaphore_destroy(mach_task_self(), _loopbackCaptureSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," StopLoopbackRecording semaphore_destroy() error: %d", kernErr);}if (_loopbackCaptureWorkerThread){delete _loopbackCaptureWorkerThread;_loopbackCaptureWorkerThread = NULL;}// Remove listeners.AudioObjectPropertyAddresspropertyAddress = { kAudioDevicePropertyStreamFormat,kAudioDevicePropertyScopeInput, 0 };WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_zoomDeviceMicID,&propertyAddress, &objectListenerProc, this));propertyAddress.mSelector = kAudioDeviceProcessorOverload;WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_zoomDeviceMicID,&propertyAddress, &objectListenerProc, this));_loopbackRecIsInitialized = false;_loopbackRecording = false;if (_ptrAudioBuffer){if (_ptrAudioBuffer->SetAudioShareStatus(false) == 0){_bAudioShareStatus = false;}}bool bZoomAudioDeviceMuted = false;_mixerManager.SpeakerMute(bZoomAudioDeviceMuted, _zoomDeviceSpeakerID);if (!SetSystemDefaultPlayDevice(_SystemDefaultSpeakerID)){CheckAndReplaceZoomDevice();}else{setZoomAudioDeviceProperty(false);}if (GetSystemDefaultPlayDevice(_SystemDefaultSpeakerID)){_mixerManager.SetSpeakerMute(bZoomAudioDeviceMuted,_SystemDefaultSpeakerID,0);}_SystemDefaultSpeakerID = kAudioObjectUnknown;/*fclose(_fPCMFile);fclose(_fPCMFileOrg);*/return 0;}bool AudioDeviceMac::LoopbackRecording() const{return _loopbackRecording;}bool AudioDeviceMac::ZoomAudioDeviceCheck(){if (!checkZoomAudioDeviceVersion()){return false;}AudioDeviceID playDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_UWord32 nPlayDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,playDevices, MaxNumberDevices,ZoomAudioDeviceNum);bool bZoomDeviceInstalled = false;WebRtc_Word32 zoomPlayDeviceIndex = GetZoomDeviceIndex(kAudioDevicePropertyScopeOutput,playDevices,nPlayDevices);if (zoomPlayDeviceIndex != nPlayDevices) {_zoomDeviceSpeakerIndex = zoomPlayDeviceIndex;_zoomDeviceSpeakerID = playDevices[zoomPlayDeviceIndex];bZoomDeviceInstalled = true;}if (bZoomDeviceInstalled){AudioDeviceID recordDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_UWord32 nRecordDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,recordDevices, MaxNumberDevices,ZoomAudioDeviceNum);WebRtc_Word32 zoomRecordDeviceIndex = GetZoomDeviceIndex(kAudioDevicePropertyScopeInput,recordDevices,nRecordDevices);if (zoomRecordDeviceIndex!= nRecordDevices){_zoomDeviceMicIndex = zoomRecordDeviceIndex;_zoomDeviceMicID = recordDevices[zoomRecordDeviceIndex];}else{bZoomDeviceInstalled = false;}}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," SoundFlowerDeviceCheck bZoomDeviceInstalled = %d", bZoomDeviceInstalled);return bZoomDeviceInstalled;}bool AudioDeviceMac::SetSystemDefaultPlayDevice(AudioDeviceID device){bool bSuccess = false;UInt32 thePropSize;AudioDeviceID *theDeviceList = NULL;UInt32 theNumDevices = 0;AudioObjectPropertyAddress thePropertyAddress = { kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyElementMaster };OSStatus result = AudioObjectGetPropertyDataSize(kAudioObjectSystemObject, &thePropertyAddress, 0, NULL, &thePropSize);if (result){return bSuccess;}// Find out how many devices are on the systemtheNumDevices = thePropSize / sizeof(AudioDeviceID);theDeviceList = (AudioDeviceID*)calloc(theNumDevices, sizeof(AudioDeviceID));result = AudioObjectGetPropertyData(kAudioObjectSystemObject, &thePropertyAddress, 0, NULL, &thePropSize, theDeviceList);for (UInt32 i=0; i < theNumDevices; i++){if (device == theDeviceList[i]){// we found the device, now it as the default output devicethePropertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice;thePropertyAddress.mScope = kAudioObjectPropertyScopeGlobal;thePropertyAddress.mElement = kAudioObjectPropertyElementMaster;result = AudioObjectSetPropertyData(kAudioObjectSystemObject, &thePropertyAddress, 0, NULL, sizeof(AudioDeviceID), &theDeviceList[i]);if (result){bSuccess = false;}else{bSuccess = true;}break;}}if (theDeviceList){free(theDeviceList);}WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," SetSystemDefaultPlayDevice %d,%d", device,bSuccess);return bSuccess;}bool AudioDeviceMac::GetSystemDefaultPlayDevice(AudioDeviceID& device){AudioObjectPropertyAddresspropertyAddressDefault = { kAudioHardwarePropertyDefaultOutputDevice,kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };AudioDeviceID usedID;UInt32 uintSize = sizeof(UInt32);AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddressDefault, 0, NULL, &uintSize, &usedID);if (usedID != kAudioDeviceUnknown){device = usedID;return true;} else{WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"GetSystemDefaultPlayDevice(): Default device unknown");return false;}}OSStatus AudioDeviceMac::loopbackDeviceIOProc(AudioDeviceID device, const AudioTimeStamp*,const AudioBufferList* inputData,const AudioTimeStamp* inputTime,AudioBufferList*,const AudioTimeStamp*, void* clientData){AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;assert(ptrThis != NULL);ptrThis->implLoopbackInDeviceIOProc(device,inputData, inputTime);return 0;}OSStatus AudioDeviceMac::implLoopbackInDeviceIOProc(AudioDeviceID device,const AudioBufferList *inputData,const AudioTimeStamp *inputTime){OSStatus err = noErr;UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());// Check if we should close down audio device// Double-checked locking optimization to remove locking overheadif (_doStopLoopbackRec){_loopbackCritSect.Enter();if (_doStopLoopbackRec){if (!_loopbackRecording){if (AudioDeviceCreateIOProcID != NULL){WEBRTC_CA_LOG_ERR(AudioDeviceStop(_zoomDeviceMicID,_loopbackDeviceIOProcID));}else{WEBRTC_CA_LOG_ERR(AudioDeviceStop(_zoomDeviceMicID,loopbackDeviceIOProc));}if (AudioDeviceDestroyIOProcID != NULL){WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_zoomDeviceMicID,_loopbackDeviceIOProcID));}else{if (err == noErr){WEBRTC_CA_LOG_WARN(AudioDeviceRemoveIOProc(_zoomDeviceMicID, loopbackDeviceIOProc));}}if (err == noErr){WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,_id, " loopback Recording device stopped");}}if (err == noErr){_doStopLoopbackRec = false;_stopEventLoopbackRec.Set();}_loopbackCritSect.Leave();return 0;}_loopbackCritSect.Leave();}if (!_loopbackRecording){return 0;}if (device != _zoomDeviceMicID){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," Error device id not match device = %d,_zoomDeviceMicID = %d",device,_zoomDeviceMicID);return 0;}ring_buffer_size_t bufSizeSamples =PaUtil_GetRingBufferReadAvailable(_paLoopbackCaptureBuffer);assert(inputData->mNumberBuffers == 1);ring_buffer_size_t numSamples = inputData->mBuffers->mDataByteSize* _loopbackStreamFormat.mChannelsPerFrame / _loopbackStreamFormat.mBytesPerPacket;PaUtil_WriteRingBuffer(_paLoopbackCaptureBuffer, inputData->mBuffers->mData,numSamples);kern_return_t kernErr = semaphore_signal_all(_loopbackCaptureSemaphore);if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"implLoopbackInDeviceIOProc semaphore_signal_all() error: %d", kernErr);}return err;}OSStatus AudioDeviceMac::loopbackInConverterProc(AudioConverterRef audioConverter,UInt32 *numberDataPackets, AudioBufferList *data,AudioStreamPacketDescription **dataPacketDescription,void *inUserData){AudioDeviceMac *ptrThis = static_cast<AudioDeviceMac*> (inUserData);assert(ptrThis != NULL);return ptrThis->implLoopbackInConverterProc(numberDataPackets, data);}OSStatus AudioDeviceMac::implLoopbackInConverterProc(UInt32 *numberDataPackets,AudioBufferList *data){assert(data->mNumberBuffers == 1);ring_buffer_size_t numSamples = *numberDataPackets* _loopbackStreamFormat.mChannelsPerFrame;while (PaUtil_GetRingBufferReadAvailable(_paLoopbackCaptureBuffer) < numSamples){mach_timespec_t timeout;timeout.tv_sec = 0;timeout.tv_nsec = TIMER_PERIOD_MS;kern_return_t kernErr = semaphore_timedwait(_loopbackCaptureSemaphore, timeout);if (kernErr == KERN_OPERATION_TIMED_OUT){int32_t signal = AtomicGet32(&_loopbackCaptureDeviceIsAlive);if (signal == 0){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," implLoopbackInConverterProc waiting for loopback record timeout error: %d, exit thread!!!!", kernErr);// The capture device is no longer alive; stop the worker thread.*numberDataPackets = 0;return 1;}WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," implLoopbackInConverterProc waiting for loopback record timeout error: %d", kernErr);} else if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," loopback semaphore_wait() error: %d", kernErr);}}void* dummyPtr;ring_buffer_size_t dummySize;PaUtil_GetRingBufferReadRegions(_paLoopbackCaptureBuffer, numSamples,&data->mBuffers->mData, &numSamples,&dummyPtr, &dummySize);PaUtil_AdvanceRingBufferReadIndex(_paLoopbackCaptureBuffer, numSamples);data->mBuffers->mNumberChannels = _loopbackStreamFormat.mChannelsPerFrame;*numberDataPackets = numSamples / _loopbackStreamFormat.mChannelsPerFrame;data->mBuffers->mDataByteSize = *numberDataPackets * _loopbackStreamFormat.mBytesPerPacket;return 0;}bool AudioDeviceMac::RunLoopbackCapture(void* ptrThis){return static_cast<AudioDeviceMac*> (ptrThis)->LoopbackCaptureWorkerThread();}#ifdef BUILD_FOR_MIMObool AudioDeviceMac::LoopbackCaptureWorkerThreadBM(){OSStatus err = noErr;UInt32 noRecSamples = ENGINE_REC_BUF_SIZE_IN_SAMPLES;SInt16 recordBuffer[noRecSamples * _loopbackRecChannels * 2];ring_buffer_size_t numSamples = noRecSamples * _loopbackRecChannels;while (PaUtil_GetRingBufferReadAvailable(_paLoopbackCaptureBuffer) < numSamples){mach_timespec_t timeout;timeout.tv_sec = 0;timeout.tv_nsec = TIMER_PERIOD_MS;kern_return_t kernErr = semaphore_timedwait(_loopbackCaptureSemaphore, timeout);if (kernErr == KERN_OPERATION_TIMED_OUT){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, " loopback record timeout error: %d", kernErr);return true;} else if (kernErr != KERN_SUCCESS){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, " loopback semaphore_wait() error: %d", kernErr);}}void* dummyPtr = NULL;ring_buffer_size_t dummySize = 0;void* dummyPtr1 = NULL;ring_buffer_size_t dummySize1 = 0;PaUtil_GetRingBufferReadRegions(_paLoopbackCaptureBuffer, numSamples,&dummyPtr, &dummySize, &dummyPtr1, &dummySize1);PaUtil_AdvanceRingBufferReadIndex(_paLoopbackCaptureBuffer, numSamples);if (dummySize != 0){memcpy(recordBuffer,(int16_t *)dummyPtr,dummySize*sizeof(int16_t));}if (dummySize1 != 0){memcpy(recordBuffer + dummySize, (int16_t *)dummyPtr1, dummySize1*sizeof(int16_t));}uint16_t size = noRecSamples;{if (!_ptrAudioBuffer){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," loobback capture AudioBuffer is invalid");return false;}_ptrAudioBuffer->SetLoopbackRecordedBuffer((WebRtc_Word8*) &recordBuffer, (WebRtc_UWord32) size);{CriticalSectionScoped lock(_zoomDeviceBufferCritSect);if (_loopbackCaptureAvailbaleBufData != MAXLOOPBACKFRAMEBUFNUM){memcpy(_ploopbackCaptureBufData[_loopbackCaptureBufDataWriteIndex],recordBuffer,size * 4 );_loopbackCaptureBufDataWriteIndex ++;if (_loopbackCaptureBufDataWriteIndex == MAXLOOPBACKFRAMEBUFNUM) {_loopbackCaptureBufDataWriteIndex = 0;}_loopbackCaptureAvailbaleBufData++;}}bool bHasEcho = false;_ptrAudioBuffer->DeliverLoopbackRecordedData(bHasEcho);}return true;}#endifbool AudioDeviceMac::LoopbackCaptureWorkerThread(){#ifdef BUILD_FOR_MIMOif (_LoopBackDeviceSource == kExtraSource){return LoopbackCaptureWorkerThreadBM();}#endifOSStatus err = noErr;UInt32 noRecSamples = ENGINE_REC_BUF_SIZE_IN_SAMPLES* _loopbackDesiredFormat.mChannelsPerFrame;SInt16 recordBuffer[noRecSamples*4];UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;AudioBufferList engineBuffer;engineBuffer.mNumberBuffers = 1; // Interleaved channels.engineBuffer.mBuffers->mNumberChannels = _loopbackDesiredFormat.mChannelsPerFrame;engineBuffer.mBuffers->mDataByteSize = _loopbackDesiredFormat.mBytesPerPacket* noRecSamples;engineBuffer.mBuffers->mData = recordBuffer;err = AudioConverterFillComplexBuffer(_loopbackCaptureConverter, loopbackInConverterProc,this, &size, &engineBuffer, NULL);if (size != ENGINE_REC_BUF_SIZE_IN_SAMPLES) {WEBRTC_CA_LOG_WARN(AudioConverterDispose(_loopbackCaptureConverter));WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_loopbackStreamFormat, &_loopbackDesiredFormat,&_loopbackCaptureConverter));}if (err != noErr){if (err == 1){// This is our own error.return false;} else{logCAMsg(kTraceError, kTraceAudioDevice, _id,"Error in loopback AudioConverterFillComplexBuffer()",(const char *) &err);return false;}}// TODO(xians): what if the returned size is incorrect?if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES){if (!_ptrAudioBuffer){WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id," loobback capture AudioBuffer is invalid");return false;}_ptrAudioBuffer->SetLoopbackRecordedBuffer((WebRtc_Word8*) &recordBuffer,(WebRtc_UWord32) size);{/*WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,"LoopbackCaptureWorkerThread _loopbackCaptureBufDataWriteIndex(%d),size(%d)", _loopbackCaptureBufDataWriteIndex,size*2);*/CriticalSectionScoped lock(_zoomDeviceBufferCritSect);if (_loopbackCaptureAvailbaleBufData != MAXLOOPBACKFRAMEBUFNUM){memcpy(_ploopbackCaptureBufData[_loopbackCaptureBufDataWriteIndex],recordBuffer,size * 4 );// fwrite(_ploopbackCaptureBufData[_loopbackCaptureBufDataWriteIndex], 4, size, _fPCMFileOrg);_loopbackCaptureBufDataWriteIndex ++;if (_loopbackCaptureBufDataWriteIndex == MAXLOOPBACKFRAMEBUFNUM) {_loopbackCaptureBufDataWriteIndex = 0;}_loopbackCaptureAvailbaleBufData++;}}bool bHasEcho = false;_ptrAudioBuffer->DeliverLoopbackRecordedData(bHasEcho);}return true;}WebRtc_Word32 AudioDeviceMac::CheckAndRemoveZoomDevice(const AudioObjectPropertyScope scope,AudioDeviceID scopedDeviceIds[],const WebRtc_UWord32 deviceListLength){AudioDeviceID deviceIds[MaxNumberDevices];memcpy(deviceIds,scopedDeviceIds,deviceListLength*sizeof(AudioDeviceID));WebRtc_Word32 deviceNum = deviceListLength;WebRtc_Word32 i = 0,j = 0,real_index = 0;for(i = 0; i < deviceListLength; i++){char devName[128];memset(devName, 0, sizeof(devName));if(-1 == GetDeviceFriendName(scope,deviceIds[i],devName)){continue;}#ifdef BUILD_FOR_MIMOif ((strstr(devName, ZoomAudioDeviceName2) != 0) || (strstr(devName,BlackmagicAudioName) != 0)/* || (strstr(devName,MagewellAudioName) != 0)*/|| (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0))#elseif ((strstr(devName, ZoomAudioDeviceName2) != 0) || (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0))#endif{for (j = real_index; j < (deviceNum - 1); j++){scopedDeviceIds[j] = scopedDeviceIds[j+1];}deviceNum--;continue;}real_index++;}return deviceNum;}WebRtc_Word32 AudioDeviceMac::GetZoomDeviceIndex(const AudioObjectPropertyScope scope,AudioDeviceID scopedDeviceIds[],const WebRtc_UWord32 deviceListLength){WebRtc_Word32 i = 0;char devName[128];for (i = 0; i < deviceListLength; i++){memset(devName, 0, sizeof(devName));GetDeviceFriendName(scope,scopedDeviceIds[i],devName);if (strncmp(devName, ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0){break;}}return i;}WebRtc_Word32 AudioDeviceMac::CheckAndIncreaseZoomDevice(const AudioObjectPropertyScope scope,const WebRtc_UWord32 deviceListLength,WebRtc_UWord16 &index){AudioDeviceID deviceIds[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;int numberDevices = GetNumberDevices(scope, deviceIds,MaxNumberDevices,ZoomAudioDeviceNum);WebRtc_Word32 i = 0;char devName[128];for (i = 0; i < numberDevices; i++){memset(devName, 0, sizeof(devName));GetDeviceFriendName(scope,deviceIds[i],devName);#ifdef BUILD_FOR_MIMOif ((strstr(devName, ZoomAudioDeviceName2) != 0) || (strstr(devName,BlackmagicAudioName) != 0)/* || (strstr(devName,MagewellAudioName) != 0)*/|| (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0))#elseif (strstr(devName, ZoomAudioDeviceName2) != 0|| (strncmp(devName,ZoomAudioDeviceName,strlen(ZoomAudioDeviceName)) == 0))#endif{if (index >= i){index++;}}}return index;}bool AudioDeviceMac::CheckAndReplaceZoomDevice(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," CheckAndReplaceZoomDevice");AudioDeviceID defaultSystemSpeaker = kAudioDeviceUnknown;GetSystemDefaultPlayDevice(defaultSystemSpeaker);char defaultSystemSpeakerName[128];memset(defaultSystemSpeakerName,0, sizeof(defaultSystemSpeakerName));if (GetDeviceFriendName(kAudioDevicePropertyScopeOutput, defaultSystemSpeaker, defaultSystemSpeakerName)){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," CheckAndReplaceZoomDevice defaultSystemSpeakerName = %s",defaultSystemSpeakerName);if (strncmp(defaultSystemSpeakerName, ZoomAudioDeviceName, strlen(ZoomAudioDeviceName)) == 0){AudioDeviceID playDevices[MaxNumberDevices];WebRtc_UWord8 ZoomAudioDeviceNum = 0;WebRtc_Word16 nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput, playDevices,MaxNumberDevices,ZoomAudioDeviceNum);int index = 0;char devName[128];char devGUID[128];for (; index < nDevices; index++){memset(devName, 0, sizeof(devName));memset(devGUID, 0, sizeof(devGUID));if(0 == GetDeviceName(kAudioDevicePropertyScopeOutput,index,devName,devGUID)){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id," CheckAndReplaceZoomDevice index = %d, devName = %s",index,devName);if (strstr(devName, "Built-in")){if(SetSystemDefaultPlayDevice(playDevices[index])){break;}}}}if (index == nDevices){char devName[128];char devGUID[128];for (index = 0; index < nDevices; index++){memset(devName, 0, sizeof(devName));memset(devGUID, 0, sizeof(devGUID));if(0 == GetDeviceName(kAudioDevicePropertyScopeOutput,index,devName,devGUID)){if (strncmp(devName, ZoomAudioDeviceName, strlen(ZoomAudioDeviceName)) != 0){if(SetSystemDefaultPlayDevice(playDevices[index])){break;}}}}}}}setZoomAudioDeviceProperty(false);return true;}bool AudioDeviceMac::GetDeviceFriendName(const AudioObjectPropertyScope scope,AudioDeviceID DeviceId,char* name){UInt32 len = kAdmMaxDeviceNameSize;AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0 };if(noErr != AudioObjectGetPropertyData(DeviceId,&propertyAddress, 0, NULL, &len, name)){return false;}propertyAddress.mSelector = kAudioDevicePropertyDataSource;Boolean hasProperty = AudioObjectHasProperty(DeviceId,&propertyAddress);if(hasProperty){UInt32 dataSource = 0;UInt32 size = sizeof(dataSource);if(noErr == AudioObjectGetPropertyData(DeviceId,&propertyAddress, 0, NULL, &size, &dataSource)){AudioValueTranslation trans;CFStringRef str = NULL;Boolean ok;trans.mInputData = &dataSource;trans.mInputDataSize = sizeof(UInt32);trans.mOutputData = &str;trans.mOutputDataSize = sizeof(CFStringRef);propertyAddress.mSelector = kAudioDevicePropertyDataSourceNameForIDCFString;size = sizeof(AudioValueTranslation);if(AudioObjectGetPropertyData(DeviceId,&propertyAddress,0,NULL,&size,&trans)==noErr){char sourceName[128];if(str != NULL && CFStringGetCString(str, sourceName, 128, kCFStringEncodingUTF8)){strcat(name, " (");strcat(name, sourceName);strcat(name, ")");}}if(str)CFRelease(str);}}return true;}WebRtc_Word32 AudioDeviceMac::StopLoopbackLocalSpeakerPlay(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__);_loopbackLocalSpeakerPlay = false;return 0;}WebRtc_Word32 AudioDeviceMac::StartLoopbackLocalSpeakerPlay(){WEBRTC_TRACE(kTraceModuleCall, kTraceAudioDevice, _id, "%s", __FUNCTION__);_loopbackLocalSpeakerPlay = true;return 0;}WebRtc_Word32 AudioDeviceMac::RecordingSampleRate(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s, recSampleRate= %d", __FUNCTION__, _inStreamFormat.mSampleRate);return _inStreamFormat.mSampleRate;}WebRtc_Word32 AudioDeviceMac::AdjustMicrophoneSampleRateBaseDeviceMode(){WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s, use_exclusive_mode= %d", __FUNCTION__, _bUseExclusiveMode);if (_bUseExclusiveMode){OSStatus err = noErr;AudioObjectPropertyAddress propertyAddress ={ kAudioHardwarePropertyDefaultInputDevice,kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };AudioDeviceID deviceId = kAudioDeviceUnknown;UInt32 size = sizeof(AudioDeviceID);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size, &deviceId));if (deviceId == kAudioDeviceUnknown){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AdjustMicrophoneSampleRate No default device exists");return -1;}else{UInt32 transportType;AudioObjectPropertyAddress propertyAddressForTP = { kAudioDevicePropertyTransportType,kAudioDevicePropertyScopeInput, 0 };UInt32 size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,&propertyAddressForTP, 0, NULL, &size, &transportType));if (transportType == 'bltn'){AudioObjectPropertyAddresspropertyAddressForFormat = { kAudioDevicePropertyStreamFormat,kAudioDevicePropertyScopeInput, 0 };memset(&_microphoneDefaultStreamFormat, 0, sizeof(_microphoneDefaultStreamFormat));size = sizeof(_microphoneDefaultStreamFormat);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,&propertyAddressForFormat, 0, NULL, &size, &_microphoneDefaultStreamFormat));if (_microphoneDefaultStreamFormat.mSampleRate != 48000){AudioStreamBasicDescription* p;Boolean ow;int i;UInt32 propertySize=0; //sizeof(p);AudioObjectPropertyAddress propertyAddressForAllFormat = { kAudioDevicePropertyStreamFormats,kAudioDevicePropertyScopeInput, 0 };err = AudioObjectGetPropertyDataSize(deviceId,&propertyAddressForAllFormat, 0, NULL, &propertySize);if(err == noErr){p = (AudioStreamBasicDescription*)malloc(propertySize);err = AudioObjectGetPropertyData(deviceId,&propertyAddressForAllFormat, 0, NULL, &propertySize, p);if (err == noErr){int indexFor48KFormat = -1;for(int i=0;i<propertySize/sizeof(AudioStreamBasicDescription);i++){AudioStreamBasicDescription* pp = &(p[i]);if ((pp->mSampleRate == 48000) && (pp->mFormatID == kAudioFormatLinearPCM)){indexFor48KFormat = i;break;}}if (indexFor48KFormat != -1 ){err = AudioObjectSetPropertyData(deviceId,&propertyAddressForFormat,0,NULL,sizeof(AudioStreamBasicDescription),&(p[indexFor48KFormat]));if (err == noErr){_bMicrophoneDefaultStreamFormatChanged = true;WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"AdjustMicrophoneSampleRate set microphone format to 48K");}}}free(p);}}}}}else{if (_bMicrophoneDefaultStreamFormatChanged){OSStatus err = noErr;AudioObjectPropertyAddress propertyAddress ={ kAudioHardwarePropertyDefaultInputDevice,kAudioObjectPropertyScopeGlobal,kAudioObjectPropertyElementMaster };AudioDeviceID deviceId = kAudioDeviceUnknown;UInt32 size = sizeof(AudioDeviceID);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,&propertyAddress, 0, NULL, &size, &deviceId));if (deviceId == kAudioDeviceUnknown){WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id," AdjustMicrophoneSampleRate No default device exists");return -1;}else{UInt32 transportType = 0;AudioObjectPropertyAddress propertyAddressForTP = { kAudioDevicePropertyTransportType,kAudioDevicePropertyScopeInput, 0 };UInt32 size = sizeof(UInt32);WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,&propertyAddressForTP, 0, NULL, &size, &transportType));if (transportType == 'bltn'){AudioObjectPropertyAddresspropertyAddressForFormat = { kAudioDevicePropertyStreamFormat,kAudioDevicePropertyScopeInput, 0 };err = AudioObjectSetPropertyData(deviceId,&propertyAddressForFormat,0,NULL,sizeof(AudioStreamBasicDescription),&(_microphoneDefaultStreamFormat));WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,"AdjustMicrophoneSampleRate set microphone to default format err = %d",err);}}}_bMicrophoneDefaultStreamFormatChanged = false;}return 0;}WebRtc_Word32 AudioDeviceMac::SetUSBExtenderWithAudioIssue(void* USBExtenderWithAudioIssueList, WebRtc_UWord32 size){int32_t USBExtenderWithIssueNum = size / sizeof(USBExtenderWithAudioIssueInfo);WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"SetUSBExtenderWithAudioIssue size = %d,USBExtenderWithIssueNum = %d",size,USBExtenderWithIssueNum);USBExtenderWithAudioIssueInfo* pUSBExtenderWithIssue = static_cast<USBExtenderWithAudioIssueInfo*>(USBExtenderWithAudioIssueList);for (int i = 0; i < USBExtenderWithIssueNum; i++){_mUSBExtenderWithAudioIssueVec.push_back((USBExtenderWithAudioIssueInfo){pUSBExtenderWithIssue->extenderPID,pUSBExtenderWithIssue->extenderVID});WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,"SetUSBExtenderWithAudioIssue index = %d,USBExtenderVID = 0x%x,USBExtenderPID = 0x%x",i,pUSBExtenderWithIssue->extenderVID,pUSBExtenderWithIssue->extenderPID);pUSBExtenderWithIssue++;}return 0;}} // namespace webrtc