diff --git a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java index 93343a0d3f5..fab9201c56c 100644 --- a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java +++ b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java @@ -595,10 +595,9 @@ void setupAudioDevice() { // Enable/disable OpenSL ES playback. if (!peerConnectionParameters.useOpenSLES) { Log.d(TAG, "Disable OpenSL ES audio even if device supports it"); - AudioDeviceModule.setBlacklistDeviceForOpenSLESUsage(true /* enable */); } else { Log.d(TAG, "Allow OpenSL ES audio if device supports it"); - AudioDeviceModule.setBlacklistDeviceForOpenSLESUsage(false); + // TODO(magjed): Add support for external OpenSLES ADM. } if (peerConnectionParameters.disableBuiltInAEC) { diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 8b6c0d2a429..3061170c470 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -164,7 +164,8 @@ rtc_source_set("audio_device_jni") { sources = [ "src/jni/audio_device/audio_common.h", - "src/jni/audio_device/audio_device_template_android.h", + "src/jni/audio_device/audio_device_module.cc", + "src/jni/audio_device/audio_device_module.h", "src/jni/audio_device/audio_manager.cc", "src/jni/audio_device/audio_manager.h", "src/jni/audio_device/audio_record_jni.cc", diff --git a/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java index cc6f37f5547..a9c75b84069 100644 --- a/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/AudioDeviceModule.java @@ -24,10 +24,6 @@ public class AudioDeviceModule { public AudioDeviceModule() {} /* AudioManager */ - public static void setBlacklistDeviceForOpenSLESUsage(boolean enable) { - WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(enable); - } - public static void setStereoInput(boolean enable) { 
WebRtcAudioManager.setStereoInput(enable); } diff --git a/sdk/android/native_api/audio_device_module/audio_device_android.cc b/sdk/android/native_api/audio_device_module/audio_device_android.cc index 9821f3f699e..2649aa7217c 100644 --- a/sdk/android/native_api/audio_device_module/audio_device_android.cc +++ b/sdk/android/native_api/audio_device_module/audio_device_android.cc @@ -11,31 +11,76 @@ #include "sdk/android/native_api/audio_device_module/audio_device_android.h" #include +#include + #include "rtc_base/logging.h" +#include "rtc_base/ptr_util.h" #include "rtc_base/refcount.h" #include "rtc_base/refcountedobject.h" -#include "system_wrappers/include/metrics.h" +#include "sdk/android/src/jni/audio_device/aaudio_player.h" +#include "sdk/android/src/jni/audio_device/aaudio_recorder.h" #include "sdk/android/src/jni/audio_device/audio_manager.h" +#include "sdk/android/src/jni/audio_device/audio_record_jni.h" +#include "sdk/android/src/jni/audio_device/audio_track_jni.h" +#include "sdk/android/src/jni/audio_device/opensles_player.h" +#include "sdk/android/src/jni/audio_device/opensles_recorder.h" +#include "system_wrappers/include/metrics.h" namespace webrtc { +namespace { + +// This template function takes care of some boiler plate. 
+template +rtc::scoped_refptr CreateAudioDeviceModuleTemplate( + AudioDeviceModule::AudioLayer audio_layer, + JNIEnv* env, + jobject application_context) { + auto audio_manager = rtc::MakeUnique( + env, audio_layer, JavaParamRef(application_context)); + auto audio_input = rtc::MakeUnique(audio_manager.get()); + auto audio_output = rtc::MakeUnique(audio_manager.get()); + return CreateAudioDeviceModuleFromInputAndOutput( + audio_layer, std::move(audio_manager), std::move(audio_input), + std::move(audio_output)); +} + +} // namespace + +#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) rtc::scoped_refptr CreateAAudioAudioDeviceModule( JNIEnv* env, jobject application_context) { - return android_adm::AudioManager::CreateAAudioAudioDeviceModule( - env, JavaParamRef(application_context)); + RTC_LOG(INFO) << __FUNCTION__; + return CreateAudioDeviceModuleTemplate( + AudioDeviceModule::kAndroidAAudioAudio, env, application_context); } #endif -rtc::scoped_refptr CreateAudioDeviceModule( +rtc::scoped_refptr CreateJavaAudioDeviceModule( + JNIEnv* env, + jobject application_context) { + return CreateAudioDeviceModuleTemplate( + AudioDeviceModule::kAndroidJavaAudio, env, application_context); +} + +rtc::scoped_refptr CreateOpenSLESAudioDeviceModule( JNIEnv* env, - jobject application_context, - bool use_opensles_input, - bool use_opensles_output) { - return android_adm::AudioManager::CreateAudioDeviceModule( - env, JavaParamRef(application_context), use_opensles_input, - use_opensles_output); + jobject application_context) { + return CreateAudioDeviceModuleTemplate( + AudioDeviceModule::kAndroidOpenSLESAudio, env, application_context); +} + +rtc::scoped_refptr +CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env, + jobject application_context) { + return CreateAudioDeviceModuleTemplate( + AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio, env, + application_context); +} } // namespace webrtc diff --git
a/sdk/android/native_api/audio_device_module/audio_device_android.h b/sdk/android/native_api/audio_device_module/audio_device_android.h index 5f2561d8059..ef404fd277e 100644 --- a/sdk/android/native_api/audio_device_module/audio_device_android.h +++ b/sdk/android/native_api/audio_device_module/audio_device_android.h @@ -23,11 +23,17 @@ rtc::scoped_refptr CreateAAudioAudioDeviceModule( jobject application_context); #endif -rtc::scoped_refptr CreateAudioDeviceModule( +rtc::scoped_refptr CreateJavaAudioDeviceModule( JNIEnv* env, - jobject application_context, - bool use_opensles_input, - bool use_opensles_output); + jobject application_context); + +rtc::scoped_refptr CreateOpenSLESAudioDeviceModule( + JNIEnv* env, + jobject application_context); + +rtc::scoped_refptr +CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env, + jobject application_context); } // namespace webrtc diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java index aa836567e0b..e75b3ed50d7 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java @@ -42,20 +42,6 @@ class WebRtcAudioManager { private static boolean useStereoOutput = false; private static boolean useStereoInput = false; - private static boolean blacklistDeviceForOpenSLESUsage = false; - private static boolean blacklistDeviceForOpenSLESUsageIsOverridden = false; - - // Call this method to override the default list of blacklisted devices - // specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS. - // Allows an app to take control over which devices to exclude from using - // the OpenSL ES audio output path - // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. 
- @SuppressWarnings("NoSynchronizedMethodCheck") - public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) { - blacklistDeviceForOpenSLESUsageIsOverridden = true; - blacklistDeviceForOpenSLESUsage = enable; - } - // Call these methods to override the default mono audio modes for the specified direction(s) // (input and/or output). // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression. @@ -204,19 +190,7 @@ private boolean isCommunicationModeEnabled() { return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION); } - @CalledByNative - private static boolean isDeviceBlacklistedForOpenSLESUsage() { - boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden - ? blacklistDeviceForOpenSLESUsage - : WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage(); - if (blacklisted) { - Logging.d(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!"); - } - return blacklisted; - } - // Returns true if low-latency audio output is supported. - @CalledByNative public static boolean isLowLatencyOutputSupported(Context context) { return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY); } @@ -224,7 +198,6 @@ public static boolean isLowLatencyOutputSupported(Context context) { // Returns true if low-latency audio input is supported. // TODO(henrika): remove the hardcoded false return value when OpenSL ES // input performance has been evaluated and tested more. - @CalledByNative public static boolean isLowLatencyInputSupported(Context context) { // TODO(henrika): investigate if some sort of device list is needed here // as well. 
The NDK doc states that: "As of API level 21, lower latency diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index d3304ce3d3b..5a72300c5c0 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -193,7 +193,7 @@ public void stopThread() { @CalledByNative WebRtcAudioTrack(long nativeAudioTrack) { - threadChecker.checkIsOnValidThread(); + threadChecker.detachThread(); Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); this.nativeAudioTrack = nativeAudioTrack; audioManager = diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java index 0693922f709..2be436ab6db 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java @@ -35,16 +35,6 @@ final class WebRtcAudioUtils { private static final String TAG = "WebRtcAudioUtils"; - // List of devices where we have seen issues (e.g. bad audio quality) using - // the low latency output mode in combination with OpenSL ES. - // The device name is given by Build.MODEL. - private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[] { - // It is recommended to maintain a list of blacklisted models outside - // this package and instead call - // WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true) - // from the client for devices where OpenSL ES shall be disabled. - }; - // List of devices where it has been verified that the built-in effect // bad and where it makes sense to avoid using it and instead rely on the // native WebRTC version instead. The device name is given by Build.MODEL. 
@@ -207,12 +197,6 @@ public static boolean runningOnEmulator() { return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_"); } - // Returns true if the device is blacklisted for OpenSL ES usage. - public static boolean deviceIsBlacklistedForOpenSLESUsage() { - List blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS); - return blackListedModels.contains(Build.MODEL); - } - // Information about the current build, taken from system properties. static void logDeviceInfo(String tag) { Logging.d(tag, diff --git a/sdk/android/src/jni/audio_device/aaudio_player.h b/sdk/android/src/jni/audio_device/aaudio_player.h index c23081a7038..0770b4691e1 100644 --- a/sdk/android/src/jni/audio_device/aaudio_player.h +++ b/sdk/android/src/jni/audio_device/aaudio_player.h @@ -22,6 +22,7 @@ #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" namespace webrtc { @@ -30,8 +31,6 @@ class FineAudioBuffer; namespace android_adm { -class AudioManager; - // Implements low-latency 16-bit mono PCM audio output support for Android // using the C based AAudio API. // @@ -52,30 +51,31 @@ class AudioManager; // where the internal AAudio buffer can be increased when needed. It will // reduce the risk of underruns (~glitches) at the expense of an increased // latency. 
-class AAudioPlayer final : public AAudioObserverInterface, +class AAudioPlayer final : public AudioOutput, + public AAudioObserverInterface, public rtc::MessageHandler { public: explicit AAudioPlayer(AudioManager* audio_manager); ~AAudioPlayer(); - int Init(); - int Terminate(); + int Init() override; + int Terminate() override; - int InitPlayout(); - bool PlayoutIsInitialized() const; + int InitPlayout() override; + bool PlayoutIsInitialized() const override; - int StartPlayout(); - int StopPlayout(); - bool Playing() const; + int StartPlayout() override; + int StopPlayout() override; + bool Playing() const override; - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; // Not implemented in AAudio. - bool SpeakerVolumeIsAvailable(); - int SetSpeakerVolume(uint32_t volume); - rtc::Optional SpeakerVolume() const; - rtc::Optional MaxSpeakerVolume() const; - rtc::Optional MinSpeakerVolume() const; + bool SpeakerVolumeIsAvailable() override; + int SetSpeakerVolume(uint32_t volume) override; + rtc::Optional SpeakerVolume() const override; + rtc::Optional MaxSpeakerVolume() const override; + rtc::Optional MinSpeakerVolume() const override; protected: // AAudioObserverInterface implementation. diff --git a/sdk/android/src/jni/audio_device/aaudio_recorder.h b/sdk/android/src/jni/audio_device/aaudio_recorder.h index 9924b75ff97..a424c47e26c 100644 --- a/sdk/android/src/jni/audio_device/aaudio_recorder.h +++ b/sdk/android/src/jni/audio_device/aaudio_recorder.h @@ -20,6 +20,7 @@ #include "rtc_base/thread.h" #include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/aaudio_wrapper.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" namespace webrtc { @@ -28,8 +29,6 @@ class AudioDeviceBuffer; namespace android_adm { -class AudioManager; - // Implements low-latency 16-bit mono PCM audio input support for Android // using the C based AAudio API. 
// @@ -44,30 +43,29 @@ class AudioManager; // // TODO(henrika): add comments about device changes and adaptive buffer // management. -class AAudioRecorder : public AAudioObserverInterface, +class AAudioRecorder : public AudioInput, + public AAudioObserverInterface, public rtc::MessageHandler { public: explicit AAudioRecorder(AudioManager* audio_manager); ~AAudioRecorder(); - int Init(); - int Terminate(); - - int InitRecording(); - bool RecordingIsInitialized() const { return initialized_; } + int Init() override; + int Terminate() override; - int StartRecording(); - int StopRecording(); - bool Recording() const { return recording_; } + int InitRecording() override; + bool RecordingIsInitialized() const override { return initialized_; } - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + int StartRecording() override; + int StopRecording() override; + bool Recording() const override { return recording_; } - double latency_millis() const { return latency_millis_; } + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; // TODO(henrika): add support using AAudio APIs when available. - int EnableBuiltInAEC(bool enable); - int EnableBuiltInAGC(bool enable); - int EnableBuiltInNS(bool enable); + int EnableBuiltInAEC(bool enable) override; + int EnableBuiltInAGC(bool enable) override; + int EnableBuiltInNS(bool enable) override; protected: // AAudioObserverInterface implementation. diff --git a/sdk/android/src/jni/audio_device/audio_device_template_android.h b/sdk/android/src/jni/audio_device/audio_device_module.cc similarity index 89% rename from sdk/android/src/jni/audio_device/audio_device_template_android.h rename to sdk/android/src/jni/audio_device/audio_device_module.cc index 938a57b227b..e57d7c64c19 100644 --- a/sdk/android/src/jni/audio_device/audio_device_template_android.h +++ b/sdk/android/src/jni/audio_device/audio_device_module.cc @@ -8,16 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_TEMPLATE_ANDROID_H_ -#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_TEMPLATE_ANDROID_H_ +#include "sdk/android/src/jni/audio_device/audio_device_module.h" -#include +#include -#include "modules/audio_device/audio_device_buffer.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" +#include "rtc_base/refcountedobject.h" #include "rtc_base/thread_checker.h" -#include "sdk/android/src/jni/audio_device/audio_manager.h" #include "system_wrappers/include/metrics.h" #define CHECKinitialized_() \ @@ -38,6 +36,8 @@ namespace webrtc { namespace android_adm { +namespace { + // InputType/OutputType can be any class that implements the capturing/rendering // part of the AudioDeviceGeneric API. // Construction and destruction must be done on one and the same thread. Each @@ -47,9 +47,7 @@ namespace android_adm { // It is possible to call the two static methods (SetAndroidAudioDeviceObjects // and ClearAndroidAudioDeviceObjects) from a different thread but both will // RTC_CHECK that the calling thread is attached to a Java VM. - -template -class AudioDeviceTemplateAndroid : public AudioDeviceModule { +class AndroidAudioDeviceModule : public AudioDeviceModule { public: // For use with UMA logging. 
Must be kept in sync with histograms.xml in // Chrome, located at @@ -62,17 +60,23 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { NUM_STATUSES = 4 }; - AudioDeviceTemplateAndroid(JNIEnv* env, - const JavaParamRef& application_context, - AudioDeviceModule::AudioLayer audio_layer) + AndroidAudioDeviceModule(AudioDeviceModule::AudioLayer audio_layer, + std::unique_ptr audio_manager, + std::unique_ptr audio_input, + std::unique_ptr audio_output) : audio_layer_(audio_layer), - audio_manager_(env, audio_layer, application_context), + audio_manager_(std::move(audio_manager)), + input_(std::move(audio_input)), + output_(std::move(audio_output)), initialized_(false) { + RTC_CHECK(input_); + RTC_CHECK(output_); + RTC_CHECK(audio_manager_); RTC_LOG(INFO) << __FUNCTION__; thread_checker_.DetachFromThread(); } - virtual ~AudioDeviceTemplateAndroid() { RTC_LOG(INFO) << __FUNCTION__; } + virtual ~AndroidAudioDeviceModule() { RTC_LOG(INFO) << __FUNCTION__; } int32_t ActiveAudioLayer( AudioDeviceModule::AudioLayer* audioLayer) const override { @@ -89,22 +93,20 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { int32_t Init() override { RTC_LOG(INFO) << __FUNCTION__; RTC_DCHECK(thread_checker_.CalledOnValidThread()); - output_ = rtc::MakeUnique(&audio_manager_); - input_ = rtc::MakeUnique(&audio_manager_); audio_device_buffer_ = rtc::MakeUnique(); AttachAudioBuffer(); if (initialized_) { return 0; } InitStatus status; - if (!audio_manager_.Init()) { + if (!audio_manager_->Init()) { status = InitStatus::OTHER_ERROR; } else if (output_->Init() != 0) { - audio_manager_.Close(); + audio_manager_->Close(); status = InitStatus::PLAYOUT_ERROR; } else if (input_->Init() != 0) { output_->Terminate(); - audio_manager_.Close(); + audio_manager_->Close(); status = InitStatus::RECORDING_ERROR; } else { initialized_ = true; @@ -127,7 +129,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { RTC_DCHECK(thread_checker_.CalledOnValidThread()); int32_t 
err = input_->Terminate(); err |= output_->Terminate(); - err |= !audio_manager_.Close(); + err |= !audio_manager_->Close(); initialized_ = false; RTC_DCHECK_EQ(err, 0); return err; @@ -250,7 +252,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { return 0; } audio_device_buffer_->StartPlayout(); - if (!audio_manager_.IsCommunicationModeEnabled()) { + if (!audio_manager_->IsCommunicationModeEnabled()) { RTC_LOG(WARNING) << "The application should use MODE_IN_COMMUNICATION audio mode!"; } @@ -288,7 +290,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { if (Recording()) { return 0; } - if (!audio_manager_.IsCommunicationModeEnabled()) { + if (!audio_manager_->IsCommunicationModeEnabled()) { RTC_LOG(WARNING) << "The application should use MODE_IN_COMMUNICATION audio mode!"; } @@ -474,7 +476,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { int32_t StereoPlayoutIsAvailable(bool* available) const override { RTC_LOG(INFO) << __FUNCTION__; CHECKinitialized_(); - *available = audio_manager_.IsStereoPlayoutSupported(); + *available = audio_manager_->IsStereoPlayoutSupported(); RTC_LOG(INFO) << "output: " << *available; return 0; } @@ -486,7 +488,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { RTC_LOG(WARNING) << "recording in stereo is not supported"; return -1; } - bool available = audio_manager_.IsStereoPlayoutSupported(); + bool available = audio_manager_->IsStereoPlayoutSupported(); // Android does not support changes between mono and stero on the fly. // Instead, the native audio layer is configured via the audio manager // to either support mono or stereo. 
It is allowed to call this method @@ -506,7 +508,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { int32_t StereoPlayout(bool* enabled) const override { RTC_LOG(INFO) << __FUNCTION__; CHECKinitialized_(); - *enabled = audio_manager_.IsStereoPlayoutSupported(); + *enabled = audio_manager_->IsStereoPlayoutSupported(); RTC_LOG(INFO) << "output: " << *enabled; return 0; } @@ -514,7 +516,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { int32_t StereoRecordingIsAvailable(bool* available) const override { RTC_LOG(INFO) << __FUNCTION__; CHECKinitialized_(); - *available = audio_manager_.IsStereoRecordSupported(); + *available = audio_manager_->IsStereoRecordSupported(); RTC_LOG(INFO) << "output: " << *available; return 0; } @@ -526,7 +528,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { RTC_LOG(WARNING) << "recording in stereo is not supported"; return -1; } - bool available = audio_manager_.IsStereoRecordSupported(); + bool available = audio_manager_->IsStereoRecordSupported(); // Android does not support changes between mono and stero on the fly. // Instead, the native audio layer is configured via the audio manager // to either support mono or stereo. It is allowed to call this method @@ -546,7 +548,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { int32_t StereoRecording(bool* enabled) const override { RTC_LOG(INFO) << __FUNCTION__; CHECKinitialized_(); - *enabled = audio_manager_.IsStereoRecordSupported(); + *enabled = audio_manager_->IsStereoRecordSupported(); RTC_LOG(INFO) << "output: " << *enabled; return 0; } @@ -554,7 +556,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { int32_t PlayoutDelay(uint16_t* delay_ms) const override { CHECKinitialized_(); // Best guess we can do is to use half of the estimated total delay. 
- *delay_ms = audio_manager_.GetDelayEstimateInMilliseconds() / 2; + *delay_ms = audio_manager_->GetDelayEstimateInMilliseconds() / 2; RTC_DCHECK_GT(*delay_ms, 0); return 0; } @@ -574,7 +576,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { bool BuiltInAECIsAvailable() const override { RTC_LOG(INFO) << __FUNCTION__; CHECKinitialized__BOOL(); - bool isAvailable = audio_manager_.IsAcousticEchoCancelerSupported(); + bool isAvailable = audio_manager_->IsAcousticEchoCancelerSupported(); RTC_LOG(INFO) << "output: " << isAvailable; return isAvailable; } @@ -598,7 +600,7 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { bool BuiltInNSIsAvailable() const override { RTC_LOG(INFO) << __FUNCTION__; CHECKinitialized__BOOL(); - bool isAvailable = audio_manager_.IsNoiseSuppressorSupported(); + bool isAvailable = audio_manager_->IsNoiseSuppressorSupported(); RTC_LOG(INFO) << "output: " << isAvailable; return isAvailable; } @@ -644,17 +646,26 @@ class AudioDeviceTemplateAndroid : public AudioDeviceModule { rtc::ThreadChecker thread_checker_; const AudioDeviceModule::AudioLayer audio_layer_; - - AudioManager audio_manager_; - std::unique_ptr output_; - std::unique_ptr input_; + const std::unique_ptr audio_manager_; + const std::unique_ptr input_; + const std::unique_ptr output_; std::unique_ptr audio_device_buffer_; bool initialized_; }; +} // namespace + +rtc::scoped_refptr CreateAudioDeviceModuleFromInputAndOutput( + AudioDeviceModule::AudioLayer audio_layer, + std::unique_ptr audio_manager, + std::unique_ptr audio_input, + std::unique_ptr audio_output) { + return new rtc::RefCountedObject( + audio_layer, std::move(audio_manager), std::move(audio_input), + std::move(audio_output)); +} + } // namespace android_adm } // namespace webrtc - -#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_TEMPLATE_ANDROID_H_ diff --git a/sdk/android/src/jni/audio_device/audio_device_module.h b/sdk/android/src/jni/audio_device/audio_device_module.h new file mode 
100644 index 00000000000..b11beb452b7 --- /dev/null +++ b/sdk/android/src/jni/audio_device/audio_device_module.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_ + +#include + +#include "api/optional.h" +#include "modules/audio_device/audio_device_buffer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/audio_device/audio_manager.h" + +namespace webrtc { + +namespace android_adm { + +class AudioManager; + +class AudioInput { + public: + virtual ~AudioInput() {} + + virtual int32_t Init() = 0; + virtual int32_t Terminate() = 0; + + virtual int32_t InitRecording() = 0; + virtual bool RecordingIsInitialized() const = 0; + + virtual int32_t StartRecording() = 0; + virtual int32_t StopRecording() = 0; + virtual bool Recording() const = 0; + + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; + + virtual int32_t EnableBuiltInAEC(bool enable) = 0; + virtual int32_t EnableBuiltInAGC(bool enable) = 0; + virtual int32_t EnableBuiltInNS(bool enable) = 0; +}; + +class AudioOutput { + public: + virtual ~AudioOutput() {} + + virtual int32_t Init() = 0; + virtual int32_t Terminate() = 0; + virtual int32_t InitPlayout() = 0; + virtual bool PlayoutIsInitialized() const = 0; + virtual int32_t StartPlayout() = 0; + virtual int32_t StopPlayout() = 0; + virtual bool Playing() const = 0; + virtual bool SpeakerVolumeIsAvailable() = 0; + virtual int SetSpeakerVolume(uint32_t volume) = 0; + virtual rtc::Optional 
SpeakerVolume() const = 0; + virtual rtc::Optional MaxSpeakerVolume() const = 0; + virtual rtc::Optional MinSpeakerVolume() const = 0; + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; +}; + +rtc::scoped_refptr CreateAudioDeviceModuleFromInputAndOutput( + AudioDeviceModule::AudioLayer audio_layer, + std::unique_ptr audio_manager, + std::unique_ptr audio_input, + std::unique_ptr audio_output); + +} // namespace android_adm + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_DEVICE_MODULE_H_ diff --git a/sdk/android/src/jni/audio_device/audio_manager.cc b/sdk/android/src/jni/audio_device/audio_manager.cc index 8e09d8a7c9a..41943dca6de 100644 --- a/sdk/android/src/jni/audio_device/audio_manager.cc +++ b/sdk/android/src/jni/audio_device/audio_manager.cc @@ -28,7 +28,7 @@ #include "sdk/android/src/jni/audio_device/aaudio_player.h" #include "sdk/android/src/jni/audio_device/aaudio_recorder.h" #endif -#include "sdk/android/src/jni/audio_device/audio_device_template_android.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" #include "sdk/android/src/jni/audio_device/audio_manager.h" #include "sdk/android/src/jni/audio_device/audio_record_jni.h" #include "sdk/android/src/jni/audio_device/audio_track_jni.h" @@ -39,64 +39,6 @@ namespace webrtc { namespace android_adm { -#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) -rtc::scoped_refptr -AudioManager::CreateAAudioAudioDeviceModule( - JNIEnv* env, - const JavaParamRef& application_context) { - RTC_LOG(INFO) << __FUNCTION__; - return new rtc::RefCountedObject>( - env, application_context, AudioDeviceModule::kAndroidAAudioAudio); -} -#endif - -rtc::scoped_refptr AudioManager::CreateAudioDeviceModule( - JNIEnv* env, - const JavaParamRef& application_context) { - const bool use_opensles_output = - !Java_WebRtcAudioManager_isDeviceBlacklistedForOpenSLESUsage(env) && - Java_WebRtcAudioManager_isLowLatencyOutputSupported(env, - application_context); - const bool 
use_opensles_input = - use_opensles_output && Java_WebRtcAudioManager_isLowLatencyInputSupported( - env, application_context); - return CreateAudioDeviceModule(env, application_context, use_opensles_input, - use_opensles_output); -} - -rtc::scoped_refptr AudioManager::CreateAudioDeviceModule( - JNIEnv* env, - const JavaParamRef& application_context, - bool use_opensles_input, - bool use_opensles_output) { - RTC_LOG(INFO) << __FUNCTION__; - - if (use_opensles_output) { - if (use_opensles_input) { - // Use OpenSL ES for both playout and recording. - return new rtc::RefCountedObject>( - env, application_context, AudioDeviceModule::kAndroidOpenSLESAudio); - } else { - // Use OpenSL ES for output and AudioRecord API for input. This - // combination provides low-latency output audio and at the same - // time support for HW AEC using the AudioRecord Java API. - return new rtc::RefCountedObject>( - env, application_context, - AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio); - } - } else { - RTC_DCHECK(!use_opensles_input) - << "Combination of OpenSLES input and Java-based output not supported"; - // Use Java-based audio in both directions. 
- return new rtc::RefCountedObject>( - env, application_context, AudioDeviceModule::kAndroidJavaAudio); - } -} - // AudioManager implementation AudioManager::AudioManager(JNIEnv* env, AudioDeviceModule::AudioLayer audio_layer, @@ -120,6 +62,8 @@ AudioManager::AudioManager(JNIEnv* env, static_cast(output_buffer_size)); record_parameters_.reset(sample_rate, static_cast(input_channels), static_cast(input_buffer_size)); + RTC_CHECK(playout_parameters_.is_valid()); + RTC_CHECK(record_parameters_.is_valid()); thread_checker_.DetachFromThread(); } @@ -217,12 +161,10 @@ bool AudioManager::IsNoiseSuppressorSupported() const { } bool AudioManager::IsStereoPlayoutSupported() const { - RTC_DCHECK(thread_checker_.CalledOnValidThread()); return (playout_parameters_.channels() == 2); } bool AudioManager::IsStereoRecordSupported() const { - RTC_DCHECK(thread_checker_.CalledOnValidThread()); return (record_parameters_.channels() == 2); } @@ -234,13 +176,11 @@ int AudioManager::GetDelayEstimateInMilliseconds() const { const AudioParameters& AudioManager::GetPlayoutAudioParameters() { RTC_CHECK(playout_parameters_.is_valid()); - RTC_DCHECK(thread_checker_.CalledOnValidThread()); return playout_parameters_; } const AudioParameters& AudioManager::GetRecordAudioParameters() { RTC_CHECK(record_parameters_.is_valid()); - RTC_DCHECK(thread_checker_.CalledOnValidThread()); return record_parameters_; } diff --git a/sdk/android/src/jni/audio_device/audio_manager.h b/sdk/android/src/jni/audio_device/audio_manager.h index 201b3635665..5f32e2a2a1f 100644 --- a/sdk/android/src/jni/audio_device/audio_manager.h +++ b/sdk/android/src/jni/audio_device/audio_manager.h @@ -33,24 +33,6 @@ namespace android_adm { // unless Init() is called. 
class AudioManager { public: -#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO) - static rtc::scoped_refptr CreateAAudioAudioDeviceModule( - JNIEnv* env, - const JavaParamRef& application_context); -#endif - - static rtc::scoped_refptr CreateAudioDeviceModule( - JNIEnv* env, - const JavaParamRef& application_context, - bool use_opensles_input, - bool use_opensles_output); - - // This function has internal logic checking if OpenSLES is blacklisted and - // whether it's supported. - static rtc::scoped_refptr CreateAudioDeviceModule( - JNIEnv* env, - const JavaParamRef& application_context); - AudioManager(JNIEnv* env, AudioDeviceModule::AudioLayer audio_layer, const JavaParamRef& application_context); diff --git a/sdk/android/src/jni/audio_device/audio_record_jni.cc b/sdk/android/src/jni/audio_device/audio_record_jni.cc index 08c54a98c89..93675f74623 100644 --- a/sdk/android/src/jni/audio_device/audio_record_jni.cc +++ b/sdk/android/src/jni/audio_device/audio_record_jni.cc @@ -45,13 +45,13 @@ class ScopedHistogramTimer { const std::string histogram_name_; int64_t start_time_ms_; }; + } // namespace // AudioRecordJni implementation. AudioRecordJni::AudioRecordJni(AudioManager* audio_manager) - : env_(AttachCurrentThreadIfNeeded()), - j_audio_record_( - Java_WebRtcAudioRecord_Constructor(env_, + : j_audio_record_( + Java_WebRtcAudioRecord_Constructor(AttachCurrentThreadIfNeeded(), jni::jlongFromPointer(this))), audio_manager_(audio_manager), audio_parameters_(audio_manager->GetRecordAudioParameters()), @@ -64,8 +64,9 @@ AudioRecordJni::AudioRecordJni(AudioManager* audio_manager) audio_device_buffer_(nullptr) { RTC_LOG(INFO) << "ctor"; RTC_DCHECK(audio_parameters_.is_valid()); - // Detach from this thread since we want to use the checker to verify calls - // from the Java based audio thread. + // Detach from this thread since construction is allowed to happen on a + // different thread. 
+ thread_checker_.DetachFromThread(); thread_checker_java_.DetachFromThread(); } @@ -77,6 +78,7 @@ AudioRecordJni::~AudioRecordJni() { int32_t AudioRecordJni::Init() { RTC_LOG(INFO) << "Init"; + env_ = AttachCurrentThreadIfNeeded(); RTC_DCHECK(thread_checker_.CalledOnValidThread()); return 0; } diff --git a/sdk/android/src/jni/audio_device/audio_record_jni.h b/sdk/android/src/jni/audio_device/audio_record_jni.h index a8a31775bbe..40cfa0b60dc 100644 --- a/sdk/android/src/jni/audio_device/audio_record_jni.h +++ b/sdk/android/src/jni/audio_device/audio_record_jni.h @@ -17,6 +17,7 @@ #include "modules/audio_device/audio_device_buffer.h" #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/thread_checker.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" #include "sdk/android/src/jni/audio_device/audio_manager.h" namespace webrtc { @@ -41,26 +42,26 @@ namespace android_adm { // This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed // and detach when the object goes out of scope. Additional thread checking // guarantees that no other (possibly non attached) thread is used. 
-class AudioRecordJni { +class AudioRecordJni : public AudioInput { public: explicit AudioRecordJni(AudioManager* audio_manager); - ~AudioRecordJni(); + ~AudioRecordJni() override; - int32_t Init(); - int32_t Terminate(); + int32_t Init() override; + int32_t Terminate() override; - int32_t InitRecording(); - bool RecordingIsInitialized() const { return initialized_; } + int32_t InitRecording() override; + bool RecordingIsInitialized() const override { return initialized_; } - int32_t StartRecording(); - int32_t StopRecording(); - bool Recording() const { return recording_; } + int32_t StartRecording() override; + int32_t StopRecording() override; + bool Recording() const override { return recording_; } - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; - int32_t EnableBuiltInAEC(bool enable); - int32_t EnableBuiltInAGC(bool enable); - int32_t EnableBuiltInNS(bool enable); + int32_t EnableBuiltInAEC(bool enable) override; + int32_t EnableBuiltInAGC(bool enable) override; + int32_t EnableBuiltInNS(bool enable) override; // Called from Java side so we can cache the address of the Java-manged // |byte_buffer| in |direct_buffer_address_|. The size of the buffer @@ -90,7 +91,7 @@ class AudioRecordJni { rtc::ThreadChecker thread_checker_java_; // Wraps the Java specific parts of the AudioRecordJni class. - JNIEnv* const env_; + JNIEnv* env_ = nullptr; ScopedJavaGlobalRef j_audio_record_; // Raw pointer to the audio manger. diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.cc b/sdk/android/src/jni/audio_device/audio_track_jni.cc index 48aab4467b0..c5ab6b33bc4 100644 --- a/sdk/android/src/jni/audio_device/audio_track_jni.cc +++ b/sdk/android/src/jni/audio_device/audio_track_jni.cc @@ -27,9 +27,9 @@ namespace android_adm { // TODO(henrika): possible extend usage of AudioManager and add it as member. 
AudioTrackJni::AudioTrackJni(AudioManager* audio_manager) - : env_(AttachCurrentThreadIfNeeded()), - j_audio_track_( - Java_WebRtcAudioTrack_Constructor(env_, jni::jlongFromPointer(this))), + : j_audio_track_( + Java_WebRtcAudioTrack_Constructor(AttachCurrentThreadIfNeeded(), + jni::jlongFromPointer(this))), audio_parameters_(audio_manager->GetPlayoutAudioParameters()), direct_buffer_address_(nullptr), direct_buffer_capacity_in_bytes_(0), @@ -39,8 +39,9 @@ AudioTrackJni::AudioTrackJni(AudioManager* audio_manager) audio_device_buffer_(nullptr) { RTC_LOG(INFO) << "ctor"; RTC_DCHECK(audio_parameters_.is_valid()); - // Detach from this thread since we want to use the checker to verify calls - // from the Java based audio thread. + // Detach from this thread since construction is allowed to happen on a + // different thread. + thread_checker_.DetachFromThread(); thread_checker_java_.DetachFromThread(); } @@ -52,6 +53,7 @@ AudioTrackJni::~AudioTrackJni() { int32_t AudioTrackJni::Init() { RTC_LOG(INFO) << "Init"; + env_ = AttachCurrentThreadIfNeeded(); RTC_DCHECK(thread_checker_.CalledOnValidThread()); return 0; } diff --git a/sdk/android/src/jni/audio_device/audio_track_jni.h b/sdk/android/src/jni/audio_device/audio_track_jni.h index c73bd70f363..d56f30ce18e 100644 --- a/sdk/android/src/jni/audio_device/audio_track_jni.h +++ b/sdk/android/src/jni/audio_device/audio_track_jni.h @@ -19,6 +19,7 @@ #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/audio_common.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" #include "sdk/android/src/jni/audio_device/audio_manager.h" namespace webrtc { @@ -38,28 +39,28 @@ namespace android_adm { // This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed // and detach when the object goes out of scope. Additional thread checking // guarantees that no other (possibly non attached) thread is used. 
-class AudioTrackJni { +class AudioTrackJni : public AudioOutput { public: explicit AudioTrackJni(AudioManager* audio_manager); - ~AudioTrackJni(); + ~AudioTrackJni() override; - int32_t Init(); - int32_t Terminate(); + int32_t Init() override; + int32_t Terminate() override; - int32_t InitPlayout(); - bool PlayoutIsInitialized() const { return initialized_; } + int32_t InitPlayout() override; + bool PlayoutIsInitialized() const override { return initialized_; } - int32_t StartPlayout(); - int32_t StopPlayout(); - bool Playing() const { return playing_; } + int32_t StartPlayout() override; + int32_t StopPlayout() override; + bool Playing() const override { return playing_; } - bool SpeakerVolumeIsAvailable(); - int SetSpeakerVolume(uint32_t volume); - rtc::Optional SpeakerVolume() const; - rtc::Optional MaxSpeakerVolume() const; - rtc::Optional MinSpeakerVolume() const; + bool SpeakerVolumeIsAvailable() override; + int SetSpeakerVolume(uint32_t volume) override; + rtc::Optional SpeakerVolume() const override; + rtc::Optional MaxSpeakerVolume() const override; + rtc::Optional MinSpeakerVolume() const override; - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; // Called from Java side so we can cache the address of the Java-manged // |byte_buffer| in |direct_buffer_address_|. The size of the buffer @@ -86,7 +87,7 @@ class AudioTrackJni { rtc::ThreadChecker thread_checker_java_; // Wraps the Java specific parts of the AudioTrackJni class. 
- JNIEnv* const env_; + JNIEnv* env_ = nullptr; ScopedJavaGlobalRef j_audio_track_; // Contains audio parameters provided to this class at construction by the diff --git a/sdk/android/src/jni/audio_device/opensles_player.h b/sdk/android/src/jni/audio_device/opensles_player.h index 70629eaa4c5..34d3c5366af 100644 --- a/sdk/android/src/jni/audio_device/opensles_player.h +++ b/sdk/android/src/jni/audio_device/opensles_player.h @@ -22,6 +22,7 @@ #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/audio_common.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" #include "sdk/android/src/jni/audio_device/audio_manager.h" #include "sdk/android/src/jni/audio_device/opensles_common.h" @@ -49,7 +50,7 @@ namespace android_adm { // If the device doesn't claim this feature but supports API level 9 (Android // platform version 2.3) or later, then we can still use the OpenSL ES APIs but // the output latency may be higher. -class OpenSLESPlayer { +class OpenSLESPlayer : public AudioOutput { public: // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is // required for lower latency. 
Beginning with API level 18 (Android 4.3), a @@ -60,25 +61,25 @@ class OpenSLESPlayer { static const int kNumOfOpenSLESBuffers = 2; explicit OpenSLESPlayer(AudioManager* audio_manager); - ~OpenSLESPlayer(); + ~OpenSLESPlayer() override; - int Init(); - int Terminate(); + int Init() override; + int Terminate() override; - int InitPlayout(); - bool PlayoutIsInitialized() const { return initialized_; } + int InitPlayout() override; + bool PlayoutIsInitialized() const override { return initialized_; } - int StartPlayout(); - int StopPlayout(); - bool Playing() const { return playing_; } + int StartPlayout() override; + int StopPlayout() override; + bool Playing() const override { return playing_; } - bool SpeakerVolumeIsAvailable(); - int SetSpeakerVolume(uint32_t volume); - rtc::Optional SpeakerVolume() const; - rtc::Optional MaxSpeakerVolume() const; - rtc::Optional MinSpeakerVolume() const; + bool SpeakerVolumeIsAvailable() override; + int SetSpeakerVolume(uint32_t volume) override; + rtc::Optional SpeakerVolume() const override; + rtc::Optional MaxSpeakerVolume() const override; + rtc::Optional MinSpeakerVolume() const override; - void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); + void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override; private: // These callback methods are called when data is required for playout. 
diff --git a/sdk/android/src/jni/audio_device/opensles_recorder.h b/sdk/android/src/jni/audio_device/opensles_recorder.h index 556e578f856..81efcb865e6 100644 --- a/sdk/android/src/jni/audio_device/opensles_recorder.h +++ b/sdk/android/src/jni/audio_device/opensles_recorder.h @@ -22,6 +22,7 @@ #include "modules/audio_device/include/audio_device_defines.h" #include "rtc_base/thread_checker.h" #include "sdk/android/src/jni/audio_device/audio_common.h" +#include "sdk/android/src/jni/audio_device/audio_device_module.h" #include "sdk/android/src/jni/audio_device/audio_manager.h" #include "sdk/android/src/jni/audio_device/opensles_common.h" @@ -52,7 +53,7 @@ namespace android_adm { // for input effects preclude the lower latency path. // See https://developer.android.com/ndk/guides/audio/opensl-prog-notes.html // for more details. -class OpenSLESRecorder { +class OpenSLESRecorder : public AudioInput { public: // Beginning with API level 17 (Android 4.2), a buffer count of 2 or more is // required for lower latency. Beginning with API level 18 (Android 4.3), a @@ -63,24 +64,24 @@ class OpenSLESRecorder { static const int kNumOfOpenSLESBuffers = 2; explicit OpenSLESRecorder(AudioManager* audio_manager); - ~OpenSLESRecorder(); + ~OpenSLESRecorder() override; - int Init(); - int Terminate(); + int Init() override; + int Terminate() override; - int InitRecording(); - bool RecordingIsInitialized() const { return initialized_; } + int InitRecording() override; + bool RecordingIsInitialized() const override { return initialized_; } - int StartRecording(); - int StopRecording(); - bool Recording() const { return recording_; } + int StartRecording() override; + int StopRecording() override; + bool Recording() const override { return recording_; } - void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer); + void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) override; // TODO(henrika): add support using OpenSL ES APIs when available. 
- int EnableBuiltInAEC(bool enable); - int EnableBuiltInAGC(bool enable); - int EnableBuiltInNS(bool enable); + int EnableBuiltInAEC(bool enable) override; + int EnableBuiltInAGC(bool enable) override; + int EnableBuiltInNS(bool enable) override; private: // Obtaines the SL Engine Interface from the existing global Engine object. diff --git a/sdk/android/src/jni/pc/peerconnectionfactory.cc b/sdk/android/src/jni/pc/peerconnectionfactory.cc index 62873bdf9a8..04874bbe3f1 100644 --- a/sdk/android/src/jni/pc/peerconnectionfactory.cc +++ b/sdk/android/src/jni/pc/peerconnectionfactory.cc @@ -27,6 +27,8 @@ #include "sdk/android/generated_peerconnection_jni/jni/PeerConnectionFactory_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/audio_device/audio_manager.h" +#include "sdk/android/src/jni/audio_device/audio_record_jni.h" +#include "sdk/android/src/jni/audio_device/audio_track_jni.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/pc/androidnetworkmonitor.h" #include "sdk/android/src/jni/pc/audio.h" @@ -238,10 +240,23 @@ jlong CreatePeerConnectionFactoryForJava( rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory); } - rtc::scoped_refptr adm = - field_trial::IsEnabled(kExternalAndroidAudioDeviceFieldTrialName) - ? android_adm::AudioManager::CreateAudioDeviceModule(jni, jcontext) - : nullptr; + rtc::scoped_refptr adm = nullptr; + if (field_trial::IsEnabled(kExternalAndroidAudioDeviceFieldTrialName)) { + // Only Java AudioDeviceModule is supported as an external ADM at the + // moment. 
+ const AudioDeviceModule::AudioLayer audio_layer = + AudioDeviceModule::kAndroidJavaAudio; + auto audio_manager = + rtc::MakeUnique(jni, audio_layer, jcontext); + auto audio_input = + rtc::MakeUnique(audio_manager.get()); + auto audio_output = + rtc::MakeUnique(audio_manager.get()); + adm = CreateAudioDeviceModuleFromInputAndOutput( + audio_layer, std::move(audio_manager), std::move(audio_input), + std::move(audio_output)); + } + rtc::scoped_refptr audio_mixer = nullptr; std::unique_ptr call_factory(CreateCallFactory()); std::unique_ptr rtc_event_log_factory(