Android audio code: Replace C++ template with input/output interface
Bug: webrtc:7452
Change-Id: Id816500051e065918bba5c2235d38ad8eb50a8eb
Reviewed-on: https://webrtc-review.googlesource.com/64442
Commit-Queue: Magnus Jedvert <[email protected]>
Reviewed-by: Paulina Hensman <[email protected]>
Cr-Commit-Position: refs/heads/master@{#22660}
Hnoo112233 authored and Commit Bot committed Mar 28, 2018
1 parent 85eef49 commit 1a18e0a
Showing 21 changed files with 321 additions and 286 deletions.
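
Background for the diff below: the old audio_device_template_android.h composed a playout class and a recording class as C++ template parameters, whereas the new design has each implementation derive from plain AudioInput/AudioOutput interfaces declared in the new audio_device_module.h and hands them to a factory at runtime. The following is a minimal, hypothetical sketch of those interfaces, inferred only from the overrides visible in aaudio_player.h and aaudio_recorder.h further down; the actual declarations in audio_device_module.h are not part of the hunks shown here, and the include paths and exact signatures are assumptions.

// Hypothetical sketch only. Method names are taken from the overrides that
// appear in aaudio_player.h / aaudio_recorder.h in this diff; everything
// else (includes, namespaces, exact signatures) is an assumption.
#include <stdint.h>

#include <memory>

#include "api/optional.h"  // rtc::Optional (rtc_base/optional.h on older revisions).
#include "modules/audio_device/include/audio_device.h"  // AudioDeviceModule.
#include "rtc_base/scoped_ref_ptr.h"

namespace webrtc {

class AudioDeviceBuffer;

namespace android_adm {

class AudioManager;

// Playout side, implemented by e.g. AAudioPlayer, AudioTrackJni, OpenSLESPlayer.
class AudioOutput {
 public:
  virtual ~AudioOutput() {}
  virtual int Init() = 0;
  virtual int Terminate() = 0;
  virtual int InitPlayout() = 0;
  virtual bool PlayoutIsInitialized() const = 0;
  virtual int StartPlayout() = 0;
  virtual int StopPlayout() = 0;
  virtual bool Playing() const = 0;
  virtual void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) = 0;
  virtual bool SpeakerVolumeIsAvailable() = 0;
  virtual int SetSpeakerVolume(uint32_t volume) = 0;
  virtual rtc::Optional<uint32_t> SpeakerVolume() const = 0;
  virtual rtc::Optional<uint32_t> MaxSpeakerVolume() const = 0;
  virtual rtc::Optional<uint32_t> MinSpeakerVolume() const = 0;
};

// Recording side, implemented by e.g. AAudioRecorder, AudioRecordJni,
// OpenSLESRecorder.
class AudioInput {
 public:
  virtual ~AudioInput() {}
  virtual int Init() = 0;
  virtual int Terminate() = 0;
  virtual int InitRecording() = 0;
  virtual bool RecordingIsInitialized() const = 0;
  virtual int StartRecording() = 0;
  virtual int StopRecording() = 0;
  virtual bool Recording() const = 0;
  virtual void AttachAudioBuffer(AudioDeviceBuffer* audio_buffer) = 0;
  virtual int EnableBuiltInAEC(bool enable) = 0;
  virtual int EnableBuiltInAGC(bool enable) = 0;
  virtual int EnableBuiltInNS(bool enable) = 0;
};

}  // namespace android_adm

// Glues one input and one output implementation into a full ADM; the argument
// list is inferred from the call site in audio_device_android.cc below, and
// the namespace placement is part of the sketch.
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleFromInputAndOutput(
    AudioDeviceModule::AudioLayer audio_layer,
    std::unique_ptr<android_adm::AudioManager> audio_manager,
    std::unique_ptr<android_adm::AudioInput> audio_input,
    std::unique_ptr<android_adm::AudioOutput> audio_output);

}  // namespace webrtc
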
@@ -595,10 +595,9 @@ void setupAudioDevice() {
// Enable/disable OpenSL ES playback.
if (!peerConnectionParameters.useOpenSLES) {
Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
AudioDeviceModule.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
} else {
Log.d(TAG, "Allow OpenSL ES audio if device supports it");
AudioDeviceModule.setBlacklistDeviceForOpenSLESUsage(false);
// TODO(magjed): Add support for external OpenSLES ADM.
}

if (peerConnectionParameters.disableBuiltInAEC) {
3 changes: 2 additions & 1 deletion sdk/android/BUILD.gn
@@ -164,7 +164,8 @@ rtc_source_set("audio_device_jni") {

sources = [
"src/jni/audio_device/audio_common.h",
"src/jni/audio_device/audio_device_template_android.h",
"src/jni/audio_device/audio_device_module.cc",
"src/jni/audio_device/audio_device_module.h",
"src/jni/audio_device/audio_manager.cc",
"src/jni/audio_device/audio_manager.h",
"src/jni/audio_device/audio_record_jni.cc",
4 changes: 0 additions & 4 deletions sdk/android/api/org/webrtc/audio/AudioDeviceModule.java
@@ -24,10 +24,6 @@ public class AudioDeviceModule {
public AudioDeviceModule() {}

/* AudioManager */
public static void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(enable);
}

public static void setStereoInput(boolean enable) {
WebRtcAudioManager.setStereoInput(enable);
}
65 changes: 55 additions & 10 deletions sdk/android/native_api/audio_device_module/audio_device_android.cc
@@ -11,31 +11,76 @@
#include "sdk/android/native_api/audio_device_module/audio_device_android.h"

#include <stdlib.h>
#include <utility>

#include "rtc_base/logging.h"
#include "rtc_base/ptr_util.h"
#include "rtc_base/refcount.h"
#include "rtc_base/refcountedobject.h"
#include "system_wrappers/include/metrics.h"
#include "sdk/android/src/jni/audio_device/aaudio_player.h"
#include "sdk/android/src/jni/audio_device/aaudio_recorder.h"
#include "sdk/android/src/jni/audio_device/audio_manager.h"
#include "sdk/android/src/jni/audio_device/audio_record_jni.h"
#include "sdk/android/src/jni/audio_device/audio_track_jni.h"
#include "sdk/android/src/jni/audio_device/opensles_player.h"
#include "sdk/android/src/jni/audio_device/opensles_recorder.h"
#include "system_wrappers/include/metrics.h"

namespace webrtc {

namespace {

// This template function takes care of some boilerplate.
template <typename AudioInputT, typename AudioOutputT>
rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModuleTemplate(
AudioDeviceModule::AudioLayer audio_layer,
JNIEnv* env,
jobject application_context) {
auto audio_manager = rtc::MakeUnique<android_adm::AudioManager>(
env, audio_layer, JavaParamRef<jobject>(application_context));
auto audio_input = rtc::MakeUnique<AudioInputT>(audio_manager.get());
auto audio_output = rtc::MakeUnique<AudioOutputT>(audio_manager.get());
return CreateAudioDeviceModuleFromInputAndOutput(
audio_layer, std::move(audio_manager), std::move(audio_input),
std::move(audio_output));
}

} // namespace

#if defined(AUDIO_DEVICE_INCLUDE_ANDROID_AAUDIO)
rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
JNIEnv* env,
jobject application_context) {
return android_adm::AudioManager::CreateAAudioAudioDeviceModule(
env, JavaParamRef<jobject>(application_context));
RTC_LOG(INFO) << __FUNCTION__;
return CreateAudioDeviceModuleTemplate<android_adm::AAudioRecorder,
android_adm::AAudioPlayer>(
AudioDeviceModule::kAndroidAAudioAudio, env, application_context);
}
#endif

rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
JNIEnv* env,
jobject application_context) {
return CreateAudioDeviceModuleTemplate<android_adm::AudioRecordJni,
android_adm::AudioTrackJni>(
AudioDeviceModule::kAndroidJavaAudio, env, application_context);
}

rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
JNIEnv* env,
jobject application_context,
bool use_opensles_input,
bool use_opensles_output) {
return android_adm::AudioManager::CreateAudioDeviceModule(
env, JavaParamRef<jobject>(application_context), use_opensles_input,
use_opensles_output);
jobject application_context) {
return CreateAudioDeviceModuleTemplate<android_adm::OpenSLESRecorder,
android_adm::OpenSLESPlayer>(
      AudioDeviceModule::kAndroidOpenSLESAudio, env, application_context);
}

rtc::scoped_refptr<AudioDeviceModule>
CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
jobject application_context) {
return CreateAudioDeviceModuleTemplate<android_adm::AudioRecordJni,
android_adm::OpenSLESPlayer>(
AudioDeviceModule::kAndroidJavaInputAndOpenSLESOutputAudio, env,
application_context);
}

} // namespace webrtc
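
The file that actually assembles these pieces, audio_device_module.cc (added to the build in BUILD.gn above), is not among the hunks shown. As a rough, hypothetical sketch of the idea, CreateAudioDeviceModuleFromInputAndOutput presumably returns a module that owns the AudioManager plus one AudioInput and one AudioOutput and forwards the AudioDeviceModule calls to them; every name below is illustrative, and only a handful of the forwarded methods are shown.

// Illustrative outline only; not the real class from audio_device_module.cc.
// Most of the AudioDeviceModule interface is omitted, so this class cannot be
// instantiated as written.
class SketchAudioDeviceModule : public AudioDeviceModule {
 public:
  SketchAudioDeviceModule(AudioDeviceModule::AudioLayer audio_layer,
                          std::unique_ptr<android_adm::AudioManager> audio_manager,
                          std::unique_ptr<android_adm::AudioInput> audio_input,
                          std::unique_ptr<android_adm::AudioOutput> audio_output)
      : audio_layer_(audio_layer),
        audio_manager_(std::move(audio_manager)),
        input_(std::move(audio_input)),
        output_(std::move(audio_output)) {}

  // Playout calls go straight to the AudioOutput implementation...
  int32_t InitPlayout() override { return output_->InitPlayout(); }
  int32_t StartPlayout() override { return output_->StartPlayout(); }
  int32_t StopPlayout() override { return output_->StopPlayout(); }
  bool Playing() const override { return output_->Playing(); }

  // ...and recording calls to the AudioInput implementation.
  int32_t InitRecording() override { return input_->InitRecording(); }
  int32_t StartRecording() override { return input_->StartRecording(); }
  int32_t StopRecording() override { return input_->StopRecording(); }
  bool Recording() const override { return input_->Recording(); }

  // The remaining AudioDeviceModule methods would be forwarded the same way.

 private:
  const AudioDeviceModule::AudioLayer audio_layer_;
  std::unique_ptr<android_adm::AudioManager> audio_manager_;
  std::unique_ptr<android_adm::AudioInput> input_;
  std::unique_ptr<android_adm::AudioOutput> output_;
};

The concrete behavior (audio buffer attachment, init/teardown ordering, volume handling) lives in audio_device_module.cc and is not reproduced here.
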
14 changes: 10 additions & 4 deletions sdk/android/native_api/audio_device_module/audio_device_android.h
@@ -23,11 +23,17 @@ rtc::scoped_refptr<AudioDeviceModule> CreateAAudioAudioDeviceModule(
jobject application_context);
#endif

rtc::scoped_refptr<AudioDeviceModule> CreateAudioDeviceModule(
rtc::scoped_refptr<AudioDeviceModule> CreateJavaAudioDeviceModule(
JNIEnv* env,
jobject application_context,
bool use_opensles_input,
bool use_opensles_output);
jobject application_context);

rtc::scoped_refptr<AudioDeviceModule> CreateOpenSLESAudioDeviceModule(
JNIEnv* env,
jobject application_context);

rtc::scoped_refptr<AudioDeviceModule>
CreateJavaInputAndOpenSLESOutputAudioDeviceModule(JNIEnv* env,
jobject application_context);

} // namespace webrtc

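At the API level, the boolean-flag factory CreateAudioDeviceModule(env, context, use_opensles_input, use_opensles_output) is replaced by one named factory per backend combination. Below is a sketch of how a native caller would presumably migrate; the flag-to-factory mapping is inferred from the function names, and the wrapper function is illustrative, not part of the commit.

#include <jni.h>

#include "sdk/android/native_api/audio_device_module/audio_device_android.h"

// Illustrative helper (not part of WebRTC): selects the Java-input /
// OpenSL ES-output combination that the old call expressed with booleans.
rtc::scoped_refptr<webrtc::AudioDeviceModule> CreateAdmForApp(
    JNIEnv* env,
    jobject application_context) {
  // Before this commit:
  //   return webrtc::CreateAudioDeviceModule(env, application_context,
  //                                          /*use_opensles_input=*/false,
  //                                          /*use_opensles_output=*/true);
  // After this commit (inferred mapping of the flag combinations):
  //   false/false -> CreateJavaAudioDeviceModule
  //   false/true  -> CreateJavaInputAndOpenSLESOutputAudioDeviceModule
  //   true/true   -> CreateOpenSLESAudioDeviceModule
  return webrtc::CreateJavaInputAndOpenSLESOutputAudioDeviceModule(
      env, application_context);
}
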
27 changes: 0 additions & 27 deletions sdk/android/src/java/org/webrtc/audio/WebRtcAudioManager.java
@@ -42,20 +42,6 @@ class WebRtcAudioManager {
private static boolean useStereoOutput = false;
private static boolean useStereoInput = false;

private static boolean blacklistDeviceForOpenSLESUsage = false;
private static boolean blacklistDeviceForOpenSLESUsageIsOverridden = false;

// Call this method to override the default list of blacklisted devices
// specified in WebRtcAudioUtils.BLACKLISTED_OPEN_SL_ES_MODELS.
// Allows an app to take control over which devices to exclude from using
// the OpenSL ES audio output path
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
@SuppressWarnings("NoSynchronizedMethodCheck")
public static synchronized void setBlacklistDeviceForOpenSLESUsage(boolean enable) {
blacklistDeviceForOpenSLESUsageIsOverridden = true;
blacklistDeviceForOpenSLESUsage = enable;
}

// Call these methods to override the default mono audio modes for the specified direction(s)
// (input and/or output).
// TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
Expand Down Expand Up @@ -204,27 +190,14 @@ private boolean isCommunicationModeEnabled() {
return (audioManager.getMode() == AudioManager.MODE_IN_COMMUNICATION);
}

@CalledByNative
private static boolean isDeviceBlacklistedForOpenSLESUsage() {
boolean blacklisted = blacklistDeviceForOpenSLESUsageIsOverridden
? blacklistDeviceForOpenSLESUsage
: WebRtcAudioUtils.deviceIsBlacklistedForOpenSLESUsage();
if (blacklisted) {
Logging.d(TAG, Build.MODEL + " is blacklisted for OpenSL ES usage!");
}
return blacklisted;
}

// Returns true if low-latency audio output is supported.
@CalledByNative
public static boolean isLowLatencyOutputSupported(Context context) {
return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_AUDIO_LOW_LATENCY);
}

// Returns true if low-latency audio input is supported.
// TODO(henrika): remove the hardcoded false return value when OpenSL ES
// input performance has been evaluated and tested more.
@CalledByNative
public static boolean isLowLatencyInputSupported(Context context) {
// TODO(henrika): investigate if some sort of device list is needed here
// as well. The NDK doc states that: "As of API level 21, lower latency
@@ -193,7 +193,7 @@ public void stopThread() {

@CalledByNative
WebRtcAudioTrack(long nativeAudioTrack) {
threadChecker.checkIsOnValidThread();
threadChecker.detachThread();
Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
this.nativeAudioTrack = nativeAudioTrack;
audioManager =
16 changes: 0 additions & 16 deletions sdk/android/src/java/org/webrtc/audio/WebRtcAudioUtils.java
@@ -35,16 +35,6 @@
final class WebRtcAudioUtils {
private static final String TAG = "WebRtcAudioUtils";

// List of devices where we have seen issues (e.g. bad audio quality) using
// the low latency output mode in combination with OpenSL ES.
// The device name is given by Build.MODEL.
private static final String[] BLACKLISTED_OPEN_SL_ES_MODELS = new String[] {
// It is recommended to maintain a list of blacklisted models outside
// this package and instead call
// WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true)
// from the client for devices where OpenSL ES shall be disabled.
};

// List of devices where it has been verified that the built-in effect is
// bad and where it makes sense to avoid using it and instead rely on the
// native WebRTC version. The device name is given by Build.MODEL.
@@ -207,12 +197,6 @@ public static boolean runningOnEmulator() {
return Build.HARDWARE.equals("goldfish") && Build.BRAND.startsWith("generic_");
}

// Returns true if the device is blacklisted for OpenSL ES usage.
public static boolean deviceIsBlacklistedForOpenSLESUsage() {
List<String> blackListedModels = Arrays.asList(BLACKLISTED_OPEN_SL_ES_MODELS);
return blackListedModels.contains(Build.MODEL);
}

// Information about the current build, taken from system properties.
static void logDeviceInfo(String tag) {
Logging.d(tag,
32 changes: 16 additions & 16 deletions sdk/android/src/jni/audio_device/aaudio_player.h
@@ -22,6 +22,7 @@
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
#include "sdk/android/src/jni/audio_device/audio_device_module.h"

namespace webrtc {

@@ -30,8 +31,6 @@ class FineAudioBuffer;

namespace android_adm {

class AudioManager;

// Implements low-latency 16-bit mono PCM audio output support for Android
// using the C based AAudio API.
//
@@ -52,30 +51,31 @@ class AudioManager;
// where the internal AAudio buffer can be increased when needed. It will
// reduce the risk of underruns (~glitches) at the expense of an increased
// latency.
class AAudioPlayer final : public AAudioObserverInterface,
class AAudioPlayer final : public AudioOutput,
public AAudioObserverInterface,
public rtc::MessageHandler {
public:
explicit AAudioPlayer(AudioManager* audio_manager);
~AAudioPlayer();

int Init();
int Terminate();
int Init() override;
int Terminate() override;

int InitPlayout();
bool PlayoutIsInitialized() const;
int InitPlayout() override;
bool PlayoutIsInitialized() const override;

int StartPlayout();
int StopPlayout();
bool Playing() const;
int StartPlayout() override;
int StopPlayout() override;
bool Playing() const override;

void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

// Not implemented in AAudio.
bool SpeakerVolumeIsAvailable();
int SetSpeakerVolume(uint32_t volume);
rtc::Optional<uint32_t> SpeakerVolume() const;
rtc::Optional<uint32_t> MaxSpeakerVolume() const;
rtc::Optional<uint32_t> MinSpeakerVolume() const;
bool SpeakerVolumeIsAvailable() override;
int SetSpeakerVolume(uint32_t volume) override;
rtc::Optional<uint32_t> SpeakerVolume() const override;
rtc::Optional<uint32_t> MaxSpeakerVolume() const override;
rtc::Optional<uint32_t> MinSpeakerVolume() const override;

protected:
// AAudioObserverInterface implementation.
30 changes: 14 additions & 16 deletions sdk/android/src/jni/audio_device/aaudio_recorder.h
@@ -20,6 +20,7 @@
#include "rtc_base/thread.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/audio_device/aaudio_wrapper.h"
#include "sdk/android/src/jni/audio_device/audio_device_module.h"

namespace webrtc {

@@ -28,8 +29,6 @@ class AudioDeviceBuffer;

namespace android_adm {

class AudioManager;

// Implements low-latency 16-bit mono PCM audio input support for Android
// using the C based AAudio API.
//
@@ -44,30 +43,29 @@ class AudioManager;
//
// TODO(henrika): add comments about device changes and adaptive buffer
// management.
class AAudioRecorder : public AAudioObserverInterface,
class AAudioRecorder : public AudioInput,
public AAudioObserverInterface,
public rtc::MessageHandler {
public:
explicit AAudioRecorder(AudioManager* audio_manager);
~AAudioRecorder();

int Init();
int Terminate();

int InitRecording();
bool RecordingIsInitialized() const { return initialized_; }
int Init() override;
int Terminate() override;

int StartRecording();
int StopRecording();
bool Recording() const { return recording_; }
int InitRecording() override;
bool RecordingIsInitialized() const override { return initialized_; }

void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
int StartRecording() override;
int StopRecording() override;
bool Recording() const override { return recording_; }

double latency_millis() const { return latency_millis_; }
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

// TODO(henrika): add support using AAudio APIs when available.
int EnableBuiltInAEC(bool enable);
int EnableBuiltInAGC(bool enable);
int EnableBuiltInNS(bool enable);
int EnableBuiltInAEC(bool enable) override;
int EnableBuiltInAGC(bool enable) override;
int EnableBuiltInNS(bool enable) override;

protected:
// AAudioObserverInterface implementation.