diff --git a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
index 7d30ae66ed6..88fcec36974 100644
--- a/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/examples/androidapp/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -61,7 +61,6 @@ import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
 import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
 import org.webrtc.voiceengine.WebRtcAudioTrack;
-import org.webrtc.voiceengine.WebRtcAudioTrack.AudioTrackStartErrorCode;
 import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback;
 import org.webrtc.voiceengine.WebRtcAudioUtils;
@@ -496,20 +495,16 @@ public void onWebRtcAudioRecordError(String errorMessage) {
     WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrackErrorCallback() {
       @Override
       public void onWebRtcAudioTrackInitError(String errorMessage) {
-        Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage);
         reportError(errorMessage);
       }

       @Override
-      public void onWebRtcAudioTrackStartError(
-          AudioTrackStartErrorCode errorCode, String errorMessage) {
-        Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage);
+      public void onWebRtcAudioTrackStartError(String errorMessage) {
         reportError(errorMessage);
       }

       @Override
       public void onWebRtcAudioTrackError(String errorMessage) {
-        Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage);
         reportError(errorMessage);
       }
     });
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
index ae1c696d6d5..729ff376228 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioRecord.java
@@ -128,20 +128,16 @@ public void run() {
         }
       }

-      // Stops recording audio data.
-      if (audioRecord != null) {
-        Logging.d(TAG, "Calling AudioRecord.stop...");
-        try {
+      try {
+        if (audioRecord != null) {
           audioRecord.stop();
-          Logging.d(TAG, "AudioRecord.stop is done.");
-        } catch (IllegalStateException e) {
-          Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
         }
+      } catch (IllegalStateException e) {
+        Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage());
       }
-    }

-    // Stops the inner thread loop which results in calling AudioRecord.stop().
+    // Stops the inner thread loop and also calls AudioRecord.stop().
     // Does not block the calling thread.
     public void stopThread() {
       Logging.d(TAG, "stopThread");
@@ -259,7 +255,6 @@ private boolean startRecording() {
     } catch (IllegalStateException e) {
       reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
           "AudioRecord.startRecording failed: " + e.getMessage());
-      releaseAudioResources();
       return false;
     }
@@ -275,7 +270,6 @@ private boolean startRecording() {
           AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
           "AudioRecord.startRecording failed - incorrect state :"
               + audioRecord.getRecordingState());
-      releaseAudioResources();
       return false;
     }
@@ -292,13 +286,9 @@ private boolean stopRecording() {
     Logging.d(TAG, "stopRecording");
     assertTrue(audioThread != null);
     audioThread.stopThread();
-
-    Logging.d(TAG, "Stopping the AudioRecordThread...");
-    audioThread.interrupt();
     if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) {
-      Logging.e(TAG, "Join of AudioRecordThread timed out.");
+      Logging.e(TAG, "Join of AudioRecordJavaThread timed out");
     }
-    Logging.d(TAG, "AudioRecordThread has now been stopped.");
     audioThread = null;
     if (effects != null) {
       effects.release();
@@ -363,7 +353,6 @@ public static void setMicrophoneMute(boolean mute) {
   // Releases the native AudioRecord resources.
   private void releaseAudioResources() {
-    Logging.d(TAG, "releaseAudioResources");
     if (audioRecord != null) {
       audioRecord.release();
       audioRecord = null;
diff --git a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
index 14809b3ad17..ae0217d4a41 100644
--- a/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
+++ b/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRtcAudioTrack.java
@@ -72,7 +72,6 @@ private static int getDefaultUsageAttributeOnLollipopOrHigher() {
   private final long nativeAudioTrack;
   private final AudioManager audioManager;
-  private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();

   private ByteBuffer byteBuffer;
@@ -84,15 +83,9 @@ private static int getDefaultUsageAttributeOnLollipopOrHigher() {
   private static volatile boolean speakerMute = false;
   private byte[] emptyBytes;

-  // Audio playout/track error handler functions.
-  public enum AudioTrackStartErrorCode {
-    AUDIO_TRACK_START_EXCEPTION,
-    AUDIO_TRACK_START_STATE_MISMATCH,
-  }
-
   public static interface WebRtcAudioTrackErrorCallback {
     void onWebRtcAudioTrackInitError(String errorMessage);
-    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
+    void onWebRtcAudioTrackStartError(String errorMessage);
     void onWebRtcAudioTrackError(String errorMessage);
   }
@@ -120,7 +113,26 @@ public AudioTrackThread(String name) {
     public void run() {
       Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
       Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());
-      assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
+
+      try {
+        // In MODE_STREAM mode we can optionally prime the output buffer by
+        // writing up to bufferSizeInBytes (from constructor) before starting.
+        // This priming will avoid an immediate underrun, but is not required.
+        // TODO(henrika): initial tests have shown that priming is not required.
+        audioTrack.play();
+      } catch (IllegalStateException e) {
+        reportWebRtcAudioTrackStartError("AudioTrack.play failed: " + e.getMessage());
+        releaseAudioResources();
+        return;
+      }
+      // We have seen reports that AudioTrack.play() can sometimes start in a
+      // paused mode (e.g. when application is in background mode).
+      // TODO(henrika): consider calling reportWebRtcAudioTrackStartError()
+      // and release audio resources here as well. For now, let the thread start
+      // and hope that the audio session can be restored later.
+      if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+        Logging.w(TAG, "AudioTrack failed to enter playing state.");
+      }

       // Fixed size in bytes of each 10ms block of audio data that we ask for
       // using callbacks to the native WebRTC client.
@@ -169,10 +181,10 @@ public void run() {
       // MODE_STREAM mode, audio will stop playing after the last buffer that
       // was written has been played.
       if (audioTrack != null) {
-        Logging.d(TAG, "Calling AudioTrack.stop...");
+        Logging.d(TAG, "Stopping the audio track...");
         try {
           audioTrack.stop();
-          Logging.d(TAG, "AudioTrack.stop is done.");
+          Logging.d(TAG, "The audio track has now been stopped.");
         } catch (IllegalStateException e) {
           Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
         }
@@ -188,7 +200,7 @@ private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int s
       return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
     }

-    // Stops the inner thread loop which results in calling AudioTrack.stop().
+    // Stops the inner thread loop and also calls AudioTrack.pause() and flush().
     // Does not block the calling thread.
     public void stopThread() {
       Logging.d(TAG, "stopThread");
@@ -197,7 +209,6 @@ public void stopThread() {
   }

   WebRtcAudioTrack(long nativeAudioTrack) {
-    threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
     this.nativeAudioTrack = nativeAudioTrack;
     audioManager =
@@ -208,7 +219,6 @@ public void stopThread() {
   }

   private boolean initPlayout(int sampleRate, int channels) {
-    threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
     final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
     byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
@@ -280,66 +290,41 @@ private boolean initPlayout(int sampleRate, int channels) {
   }

   private boolean startPlayout() {
-    threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "startPlayout");
     assertTrue(audioTrack != null);
     assertTrue(audioThread == null);
-
-    // Starts playing an audio track.
-    try {
-      audioTrack.play();
-    } catch (IllegalStateException e) {
-      reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
-          "AudioTrack.play failed: " + e.getMessage());
-      releaseAudioResources();
+    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
+      reportWebRtcAudioTrackStartError("AudioTrack instance is not successfully initialized.");
       return false;
     }
-
-    // Verify the playout state up to two times (with a sleep in between)
-    // before returning false and reporting an error.
-    int numberOfStateChecks = 0;
-    while (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING &&
-        ++numberOfStateChecks < 2) {
-      threadSleep(200);
-    }
-    if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
-      reportWebRtcAudioTrackStartError(
-          AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
-          "AudioTrack.play failed - incorrect state :"
-          + audioTrack.getPlayState());
-      releaseAudioResources();
-      return false;
-    }
-
-    // Create and start new high-priority thread which calls AudioTrack.write()
-    // and where we also call the native nativeGetPlayoutData() callback to
-    // request decoded audio from WebRTC.
     audioThread = new AudioTrackThread("AudioTrackJavaThread");
     audioThread.start();
     return true;
   }

   private boolean stopPlayout() {
-    threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "stopPlayout");
     assertTrue(audioThread != null);
     logUnderrunCount();
     audioThread.stopThread();
-    Logging.d(TAG, "Stopping the AudioTrackThread...");
-    audioThread.interrupt();
-    if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
-      Logging.e(TAG, "Join of AudioTrackThread timed out.");
-    }
-    Logging.d(TAG, "AudioTrackThread has now been stopped.");
+    final Thread aThread = audioThread;
     audioThread = null;
+    if (aThread != null) {
+      Logging.d(TAG, "Stopping the AudioTrackThread...");
+      aThread.interrupt();
+      if (!ThreadUtils.joinUninterruptibly(aThread, AUDIO_TRACK_THREAD_JOIN_TIMEOUT_MS)) {
+        Logging.e(TAG, "Join of AudioTrackThread timed out.");
+      }
+      Logging.d(TAG, "AudioTrackThread has now been stopped.");
+    }
+    releaseAudioResources();
     return true;
   }

   // Get max possible volume index for a phone call audio stream.
   private int getStreamMaxVolume() {
-    threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "getStreamMaxVolume");
     assertTrue(audioManager != null);
     return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
@@ -347,7 +332,6 @@ private int getStreamMaxVolume() {

   // Set current volume level for a phone call audio stream.
   private boolean setStreamVolume(int volume) {
-    threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "setStreamVolume(" + volume + ")");
     assertTrue(audioManager != null);
     if (isVolumeFixed()) {
@@ -366,7 +350,6 @@ private boolean isVolumeFixed() {

   /** Get current volume level for a phone call audio stream. */
   private int getStreamVolume() {
-    threadChecker.checkIsOnValidThread();
     Logging.d(TAG, "getStreamVolume");
     assertTrue(audioManager != null);
     return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
@@ -481,17 +464,16 @@ private void releaseAudioResources() {
   }

   private void reportWebRtcAudioTrackInitError(String errorMessage) {
-    Logging.e(TAG, "Init playout error: " + errorMessage);
+    Logging.e(TAG, "Init error: " + errorMessage);
     if (errorCallback != null) {
       errorCallback.onWebRtcAudioTrackInitError(errorMessage);
     }
   }

-  private void reportWebRtcAudioTrackStartError(
-      AudioTrackStartErrorCode errorCode, String errorMessage) {
-    Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage);
+  private void reportWebRtcAudioTrackStartError(String errorMessage) {
+    Logging.e(TAG, "Start error: " + errorMessage);
     if (errorCallback != null) {
-      errorCallback.onWebRtcAudioTrackStartError(errorCode, errorMessage);
+      errorCallback.onWebRtcAudioTrackStartError(errorMessage);
     }
   }

@@ -502,13 +484,4 @@ private void reportWebRtcAudioTrackError(String errorMessage) {
     }
   }

-  // Causes the currently executing thread to sleep for the specified number
-  // of milliseconds.
-  private void threadSleep(long millis) {
-    try {
-      Thread.sleep(millis);
-    } catch (InterruptedException e) {
-      Logging.e(TAG, "Thread.sleep failed: " + e.getMessage());
-    }
-  }
 }
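
Note: after this change, WebRtcAudioTrack.WebRtcAudioTrackErrorCallback no longer carries an AudioTrackStartErrorCode, so all three callback methods receive only a String message. The sketch below shows one way application code could register the simplified callback. The WebRtcAudioTrack.setErrorCallback() call and the callback interface come from the diff above; the AudioTrackErrorLogger class, its TAG constant, and the Log calls are hypothetical glue code, not part of the patch.

// Illustrative sketch only, assuming the post-revert WebRtcAudioTrack API shown above.
import android.util.Log;

import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback;

final class AudioTrackErrorLogger {  // hypothetical helper class
  private static final String TAG = "AudioTrackErrorLogger";  // hypothetical tag

  // Installs a process-wide playout error callback; setErrorCallback() is static.
  static void install() {
    WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrackErrorCallback() {
      @Override
      public void onWebRtcAudioTrackInitError(String errorMessage) {
        Log.e(TAG, "AudioTrack init error: " + errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackStartError(String errorMessage) {
        // No AudioTrackStartErrorCode parameter after this change; only the message is delivered.
        Log.e(TAG, "AudioTrack start error: " + errorMessage);
      }

      @Override
      public void onWebRtcAudioTrackError(String errorMessage) {
        Log.e(TAG, "AudioTrack runtime error: " + errorMessage);
      }
    });
  }
}

Callers that previously distinguished AUDIO_TRACK_START_EXCEPTION from AUDIO_TRACK_START_STATE_MISMATCH only get the message text now; per the diff, a play-state mismatch inside AudioTrackThread is merely logged as a warning rather than reported through the callback.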