/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.voiceengine;

import java.lang.Thread;
import java.nio.ByteBuffer;

import android.annotation.TargetApi;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Process;

import org.webrtc.Logging;

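/**
 * WebRtcAudioTrack implements audio playout on top of android.media.AudioTrack.
 * Instances are created and the private methods below (initPlayout,
 * startPlayout, stopPlayout and the volume helpers) are invoked from the
 * native WebRTC audio device layer over JNI, which identifies itself via the
 * |nativeAudioTrack| pointer passed to the constructor.
 */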
class WebRtcAudioTrack {
  private static final boolean DEBUG = false;

  private static final String TAG = "WebRtcAudioTrack";

  // Default audio data format is PCM 16 bit per sample.
  // Guaranteed to be supported by all devices.
  private static final int BITS_PER_SAMPLE = 16;

  // Requested size, in milliseconds, of each played buffer fetched from the
  // native WebRTC client.
  private static final int CALLBACK_BUFFER_SIZE_MS = 10;

  // Average number of callbacks per second.
  private static final int BUFFERS_PER_SECOND = 1000 / CALLBACK_BUFFER_SIZE_MS;

  private final Context context;
  private final long nativeAudioTrack;
  private final AudioManager audioManager;

  private ByteBuffer byteBuffer;

  private AudioTrack audioTrack = null;
  private AudioTrackThread audioThread = null;

  /**
   * Audio thread which keeps calling AudioTrack.write() to stream audio.
   * Data is periodically acquired from the native WebRTC layer using the
   * nativeGetPlayoutData callback function.
   * This thread uses a Process.THREAD_PRIORITY_URGENT_AUDIO priority.
   */
  private class AudioTrackThread extends Thread {
    private volatile boolean keepAlive = true;

    public AudioTrackThread(String name) {
      super(name);
    }

    @Override
    public void run() {
      Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
      Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo());

      try {
        // In MODE_STREAM mode we can optionally prime the output buffer by
        // writing up to bufferSizeInBytes (from constructor) before starting.
        // This priming will avoid an immediate underrun, but is not required.
        // TODO(henrika): initial tests have shown that priming is not required.
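        // A minimal priming sketch (not enabled here), assuming silence is an
        // acceptable preroll: repeatedly write zeroed audio before play()
        // until roughly the track's buffer size has been queued, e.g.
        //   byte[] silence = new byte[byteBuffer.capacity()];
        //   audioTrack.write(silence, 0, silence.length);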
        audioTrack.play();
        assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING);
      } catch (IllegalStateException e) {
        Logging.e(TAG, "AudioTrack.play failed: " + e.getMessage());
        return;
      }

      // Fixed size in bytes of each 10ms block of audio data that we ask for
      // using callbacks to the native WebRTC client.
      final int sizeInBytes = byteBuffer.capacity();

      while (keepAlive) {
        // Get 10ms of PCM data from the native WebRTC client. Audio data is
        // written into the common ByteBuffer using the address that was
        // cached at construction.
        nativeGetPlayoutData(sizeInBytes, nativeAudioTrack);
        // Write data until all data has been written to the audio sink.
        // Upon return, the buffer position will have been advanced to reflect
        // the amount of data that was successfully written to the AudioTrack.
        assertTrue(sizeInBytes <= byteBuffer.remaining());
        int bytesWritten = 0;
        if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
          bytesWritten = writeOnLollipop(audioTrack, byteBuffer, sizeInBytes);
        } else {
          bytesWritten = writePreLollipop(audioTrack, byteBuffer, sizeInBytes);
        }
        if (bytesWritten != sizeInBytes) {
          Logging.e(TAG, "AudioTrack.write failed: " + bytesWritten);
          if (bytesWritten == AudioTrack.ERROR_INVALID_OPERATION) {
            keepAlive = false;
          }
        }
        // The byte buffer must be rewound since byteBuffer.position() is
        // increased at each call to AudioTrack.write(). If we don't do this,
        // the next call to AudioTrack.write() will fail.
        byteBuffer.rewind();

        // TODO(henrika): it is possible to create a delay estimate here by
        // counting the number of written frames and subtracting the result
        // from audioTrack.getPlaybackHeadPosition().
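        // A minimal sketch of such an estimate (not enabled here), assuming
        // 16-bit mono PCM so that each frame occupies two bytes:
        //   framesWritten += sizeInBytes / 2;  // hypothetical local counter
        //   int delayFrames =
        //       framesWritten - audioTrack.getPlaybackHeadPosition();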
      }

      try {
        audioTrack.stop();
      } catch (IllegalStateException e) {
        Logging.e(TAG, "AudioTrack.stop failed: " + e.getMessage());
      }
      assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
      audioTrack.flush();
    }

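    // AudioTrack.write(ByteBuffer, int, int) and WRITE_BLOCKING were added in
    // API level 21; older releases fall back to the byte[] overload below,
    // which also blocks in MODE_STREAM until the data has been queued.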
    @TargetApi(21)
    private int writeOnLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
      return audioTrack.write(byteBuffer, sizeInBytes, AudioTrack.WRITE_BLOCKING);
    }

    private int writePreLollipop(AudioTrack audioTrack, ByteBuffer byteBuffer, int sizeInBytes) {
      return audioTrack.write(byteBuffer.array(), byteBuffer.arrayOffset(), sizeInBytes);
    }

    public void joinThread() {
      keepAlive = false;
      while (isAlive()) {
        try {
          join();
        } catch (InterruptedException e) {
          // Ignore.
        }
      }
    }
  }

  WebRtcAudioTrack(Context context, long nativeAudioTrack) {
    Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
    this.context = context;
    this.nativeAudioTrack = nativeAudioTrack;
    audioManager = (AudioManager) context.getSystemService(
        Context.AUDIO_SERVICE);
    if (DEBUG) {
      WebRtcAudioUtils.logDeviceInfo(TAG);
    }
  }

  private void initPlayout(int sampleRate, int channels) {
    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels="
        + channels + ")");
    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
    byteBuffer = ByteBuffer.allocateDirect(
        bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
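    // Example: with 16-bit mono audio at 48000 Hz this allocates
    // 2 * (48000 / 100) = 960 bytes, i.e. one 10 ms block per callback.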
    Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
    // Rather than passing the ByteBuffer with every callback (requiring
    // the potentially expensive GetDirectBufferAddress) we simply have the
    // native class cache the address to the memory once.
    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);

    // Get the minimum buffer size required to create an AudioTrack object
    // in MODE_STREAM.
    // Note that this size doesn't guarantee a smooth playback under load.
    // TODO(henrika): should we extend the buffer size to avoid glitches?
    final int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
        sampleRate,
        AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT);
    Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
    assertTrue(audioTrack == null);

    // For the streaming mode, data must be written to the audio sink in
    // chunks of size (given by byteBuffer.capacity()) smaller than the
    // total buffer size |minBufferSizeInBytes|.
    assertTrue(byteBuffer.capacity() < minBufferSizeInBytes);
    try {
      // Create an AudioTrack object and initialize its associated audio buffer.
      // The size of this buffer determines how long an AudioTrack can play
      // before running out of data.
      audioTrack = new AudioTrack(AudioManager.STREAM_VOICE_CALL,
                                  sampleRate,
                                  AudioFormat.CHANNEL_OUT_MONO,
                                  AudioFormat.ENCODING_PCM_16BIT,
                                  minBufferSizeInBytes,
                                  AudioTrack.MODE_STREAM);
    } catch (IllegalArgumentException e) {
      Logging.d(TAG, e.getMessage());
      return;
    }
    assertTrue(audioTrack.getState() == AudioTrack.STATE_INITIALIZED);
    assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_STOPPED);
    assertTrue(audioTrack.getStreamType() == AudioManager.STREAM_VOICE_CALL);
  }

  private boolean startPlayout() {
    Logging.d(TAG, "startPlayout");
    assertTrue(audioTrack != null);
    assertTrue(audioThread == null);
    audioThread = new AudioTrackThread("AudioTrackJavaThread");
    audioThread.start();
    return true;
  }

  private boolean stopPlayout() {
    Logging.d(TAG, "stopPlayout");
    assertTrue(audioThread != null);
    audioThread.joinThread();
    audioThread = null;
    if (audioTrack != null) {
      audioTrack.release();
      audioTrack = null;
    }
    return true;
  }

  /** Get max possible volume index for a phone call audio stream. */
  private int getStreamMaxVolume() {
    Logging.d(TAG, "getStreamMaxVolume");
    assertTrue(audioManager != null);
    return audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);
  }

  /** Set current volume level for a phone call audio stream. */
  private boolean setStreamVolume(int volume) {
    Logging.d(TAG, "setStreamVolume(" + volume + ")");
    assertTrue(audioManager != null);
    if (isVolumeFixed()) {
      Logging.e(TAG, "The device implements a fixed volume policy.");
      return false;
    }
    audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL, volume, 0);
    return true;
  }

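  // AudioManager.isVolumeFixed() was added in API level 21, hence the runtime
  // guard below. On devices with a fixed volume policy (for example some
  // TV-style devices) setStreamVolume() would have no effect.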
  @TargetApi(21)
  private boolean isVolumeFixed() {
    if (!WebRtcAudioUtils.runningOnLollipopOrHigher())
      return false;
    return audioManager.isVolumeFixed();
  }

  /** Get current volume level for a phone call audio stream. */
  private int getStreamVolume() {
    Logging.d(TAG, "getStreamVolume");
    assertTrue(audioManager != null);
    return audioManager.getStreamVolume(AudioManager.STREAM_VOICE_CALL);
  }

  /** Helper method which throws an exception when an assertion has failed. */
  private static void assertTrue(boolean condition) {
    if (!condition) {
      throw new AssertionError("Expected condition to be true");
    }
  }

  private native void nativeCacheDirectBufferAddress(
      ByteBuffer byteBuffer, long nativeAudioTrack);

  private native void nativeGetPlayoutData(int bytes, long nativeAudioTrack);
}