/*
 *  Copyright 2018 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.audio;

import android.content.Context;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.os.Build;
import android.support.annotation.RequiresApi;
import org.webrtc.JniCommon;
import org.webrtc.Logging;

/**
 * AudioDeviceModule implemented using android.media.AudioRecord as input and
 * android.media.AudioTrack as output.
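 *
 * <p>A minimal usage sketch, not a recommended configuration. The PeerConnectionFactory wiring
 * shown below reflects typical application-side usage and is an assumption rather than something
 * this class requires; {@code appContext} is a Context supplied by the caller.
 *
 * <pre>{@code
 * // PeerConnectionFactory.initialize(...) must already have been called.
 * AudioDeviceModule adm = JavaAudioDeviceModule.builder(appContext)
 *     .setUseHardwareAcousticEchoCanceler(true)
 *     .setUseHardwareNoiseSuppressor(true)
 *     .createAudioDeviceModule();
 * PeerConnectionFactory factory = PeerConnectionFactory.builder()
 *     .setAudioDeviceModule(adm)
 *     .createPeerConnectionFactory();
 * // Release the module when it is no longer needed; see Builder#createAudioDeviceModule().
 * adm.release();
 * }</pre>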
 */
public class JavaAudioDeviceModule implements AudioDeviceModule {
  private static final String TAG = "JavaAudioDeviceModule";

  public static Builder builder(Context context) {
    return new Builder(context);
  }

  public static class Builder {
    private final Context context;
    private final AudioManager audioManager;
    private int inputSampleRate;
    private int outputSampleRate;
    private int audioSource = WebRtcAudioRecord.DEFAULT_AUDIO_SOURCE;
    private int audioFormat = WebRtcAudioRecord.DEFAULT_AUDIO_FORMAT;
    private AudioTrackErrorCallback audioTrackErrorCallback;
    private AudioRecordErrorCallback audioRecordErrorCallback;
    private SamplesReadyCallback samplesReadyCallback;
    private AudioTrackStateCallback audioTrackStateCallback;
    private AudioRecordStateCallback audioRecordStateCallback;
    private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported();
    private boolean useHardwareNoiseSuppressor = isBuiltInNoiseSuppressorSupported();
    private boolean useStereoInput;
    private boolean useStereoOutput;

    private Builder(Context context) {
      this.context = context;
      this.audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
      this.inputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
      this.outputSampleRate = WebRtcAudioManager.getSampleRate(audioManager);
    }

    /**
     * Call this method to override the default handling of querying the native sample rate. This
     * can be useful on some devices where the available Android APIs are known to return invalid
     * results.
     */
    public Builder setSampleRate(int sampleRate) {
      Logging.d(TAG, "Input/Output sample rate overridden to: " + sampleRate);
      this.inputSampleRate = sampleRate;
      this.outputSampleRate = sampleRate;
      return this;
    }

    /**
     * Call this method to specifically override the input sample rate.
     */
    public Builder setInputSampleRate(int inputSampleRate) {
      Logging.d(TAG, "Input sample rate overridden to: " + inputSampleRate);
      this.inputSampleRate = inputSampleRate;
      return this;
    }

    /**
     * Call this method to specifically override the output sample rate.
     */
    public Builder setOutputSampleRate(int outputSampleRate) {
      Logging.d(TAG, "Output sample rate overridden to: " + outputSampleRate);
      this.outputSampleRate = outputSampleRate;
      return this;
    }

    /**
     * Call this to change the audio source. The argument should be one of the values from
     * android.media.MediaRecorder.AudioSource. The default is AudioSource.VOICE_COMMUNICATION.
     */
    public Builder setAudioSource(int audioSource) {
      this.audioSource = audioSource;
      return this;
    }

    /**
     * Call this to change the audio format. The argument should be one of the values from
     * android.media.AudioFormat: ENCODING_PCM_8BIT, ENCODING_PCM_16BIT or ENCODING_PCM_FLOAT.
     * The default audio data format is PCM 16 bit per sample, which is guaranteed to be supported
     * by all devices.
     */
    public Builder setAudioFormat(int audioFormat) {
      this.audioFormat = audioFormat;
      return this;
    }

    /**
     * Set a callback to retrieve errors from the AudioTrack.
     */
    public Builder setAudioTrackErrorCallback(AudioTrackErrorCallback audioTrackErrorCallback) {
      this.audioTrackErrorCallback = audioTrackErrorCallback;
      return this;
    }

    /**
     * Set a callback to retrieve errors from the AudioRecord.
     */
    public Builder setAudioRecordErrorCallback(AudioRecordErrorCallback audioRecordErrorCallback) {
      this.audioRecordErrorCallback = audioRecordErrorCallback;
      return this;
    }

    /**
     * Set a callback to listen to the raw audio input from the AudioRecord.
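     *
     * <p>A debug-only sketch that logs the duration of each delivered buffer. It assumes the
     * default 16-bit PCM format (2 bytes per sample) and uses android.util.Log with an arbitrary
     * tag.
     *
     * <pre>{@code
     * builder.setSamplesReadyCallback(samples -> {
     *   int bytesPerFrame = 2 * samples.getChannelCount();
     *   long numFrames = samples.getData().length / bytesPerFrame;
     *   long durationMs = 1000L * numFrames / samples.getSampleRate();
     *   Log.d("SamplesTap", durationMs + " ms of audio at " + samples.getSampleRate() + " Hz");
     * });
     * }</pre>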
     */
    public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback) {
      this.samplesReadyCallback = samplesReadyCallback;
      return this;
    }

    /**
     * Set a callback to retrieve information from the AudioTrack on when audio starts and stops.
     */
    public Builder setAudioTrackStateCallback(AudioTrackStateCallback audioTrackStateCallback) {
      this.audioTrackStateCallback = audioTrackStateCallback;
      return this;
    }

    /**
     * Set a callback to retrieve information from the AudioRecord on when audio starts and stops.
     */
    public Builder setAudioRecordStateCallback(AudioRecordStateCallback audioRecordStateCallback) {
      this.audioRecordStateCallback = audioRecordStateCallback;
      return this;
    }

    /**
     * Control whether the built-in HW noise suppressor should be used. The default is on if it is
     * supported. Support can be queried by calling isBuiltInNoiseSuppressorSupported().
     */
    public Builder setUseHardwareNoiseSuppressor(boolean useHardwareNoiseSuppressor) {
      if (useHardwareNoiseSuppressor && !isBuiltInNoiseSuppressorSupported()) {
        Logging.e(TAG, "HW NS not supported");
        useHardwareNoiseSuppressor = false;
      }
      this.useHardwareNoiseSuppressor = useHardwareNoiseSuppressor;
      return this;
    }

    /**
     * Control whether the built-in HW acoustic echo canceler should be used. The default is on if
     * it is supported. Support can be queried by calling isBuiltInAcousticEchoCancelerSupported().
     */
    public Builder setUseHardwareAcousticEchoCanceler(boolean useHardwareAcousticEchoCanceler) {
      if (useHardwareAcousticEchoCanceler && !isBuiltInAcousticEchoCancelerSupported()) {
        Logging.e(TAG, "HW AEC not supported");
        useHardwareAcousticEchoCanceler = false;
      }
      this.useHardwareAcousticEchoCanceler = useHardwareAcousticEchoCanceler;
      return this;
    }

    /**
     * Control whether stereo input should be used. The default is mono.
     */
    public Builder setUseStereoInput(boolean useStereoInput) {
      this.useStereoInput = useStereoInput;
      return this;
    }

    /**
     * Control whether stereo output should be used. The default is mono.
     */
    public Builder setUseStereoOutput(boolean useStereoOutput) {
      this.useStereoOutput = useStereoOutput;
      return this;
    }

    /**
     * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
     * and is responsible for calling release().
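     *
     * <p>A sketch of the ownership contract only; the consumer mentioned in the comment is
     * hypothetical:
     *
     * <pre>{@code
     * AudioDeviceModule adm = JavaAudioDeviceModule.builder(context).createAudioDeviceModule();
     * try {
     *   // Hand the module to its consumer, e.g. a PeerConnectionFactory.
     * } finally {
     *   adm.release();
     * }
     * }</pre>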
     */
    public AudioDeviceModule createAudioDeviceModule() {
      Logging.d(TAG, "createAudioDeviceModule");
      if (useHardwareNoiseSuppressor) {
        Logging.d(TAG, "HW NS will be used.");
      } else {
        if (isBuiltInNoiseSuppressorSupported()) {
          Logging.d(TAG, "Overriding default behavior; now using WebRTC NS!");
        }
        Logging.d(TAG, "HW NS will not be used.");
      }
      if (useHardwareAcousticEchoCanceler) {
        Logging.d(TAG, "HW AEC will be used.");
      } else {
        if (isBuiltInAcousticEchoCancelerSupported()) {
          Logging.d(TAG, "Overriding default behavior; now using WebRTC AEC!");
        }
        Logging.d(TAG, "HW AEC will not be used.");
      }
      final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, audioManager, audioSource,
          audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback,
          useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
      final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(
          context, audioManager, audioTrackErrorCallback, audioTrackStateCallback);
      return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
          inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
    }
  }

  /* AudioRecord */
  // Audio recording error handler functions.
  public enum AudioRecordStartErrorCode {
    AUDIO_RECORD_START_EXCEPTION,
    AUDIO_RECORD_START_STATE_MISMATCH,
  }

  public static interface AudioRecordErrorCallback {
    void onWebRtcAudioRecordInitError(String errorMessage);
    void onWebRtcAudioRecordStartError(AudioRecordStartErrorCode errorCode, String errorMessage);
    void onWebRtcAudioRecordError(String errorMessage);
  }

  /** Called when audio recording starts and stops. */
  public static interface AudioRecordStateCallback {
    void onWebRtcAudioRecordStart();
    void onWebRtcAudioRecordStop();
  }

  /**
   * Contains audio sample information.
   */
  public static class AudioSamples {
    /** See {@link AudioRecord#getAudioFormat()} */
    private final int audioFormat;
    /** See {@link AudioRecord#getChannelCount()} */
    private final int channelCount;
    /** See {@link AudioRecord#getSampleRate()} */
    private final int sampleRate;

    private final byte[] data;

    public AudioSamples(int audioFormat, int channelCount, int sampleRate, byte[] data) {
      this.audioFormat = audioFormat;
      this.channelCount = channelCount;
      this.sampleRate = sampleRate;
      this.data = data;
    }

    public int getAudioFormat() {
      return audioFormat;
    }

    public int getChannelCount() {
      return channelCount;
    }

    public int getSampleRate() {
      return sampleRate;
    }

    public byte[] getData() {
      return data;
    }
  }

  /** Called when new audio samples are ready. This should only be set for debug purposes. */
  public static interface SamplesReadyCallback {
    void onWebRtcAudioRecordSamplesReady(AudioSamples samples);
  }

  /* AudioTrack */
  // Audio playout/track error handler functions.
  public enum AudioTrackStartErrorCode {
    AUDIO_TRACK_START_EXCEPTION,
    AUDIO_TRACK_START_STATE_MISMATCH,
  }

  public static interface AudioTrackErrorCallback {
    void onWebRtcAudioTrackInitError(String errorMessage);
    void onWebRtcAudioTrackStartError(AudioTrackStartErrorCode errorCode, String errorMessage);
    void onWebRtcAudioTrackError(String errorMessage);
  }

  /** Called when audio playout starts and stops. */
  public static interface AudioTrackStateCallback {
    void onWebRtcAudioTrackStart();
    void onWebRtcAudioTrackStop();
  }

  /**
   * Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can
   * be excluded).
   */
  public static boolean isBuiltInAcousticEchoCancelerSupported() {
    return WebRtcAudioEffects.isAcousticEchoCancelerSupported();
  }

  /**
   * Returns true if the device supports built-in HW NS, and the UUID is approved (some UUIDs can
   * be excluded).
   */
  public static boolean isBuiltInNoiseSuppressorSupported() {
    return WebRtcAudioEffects.isNoiseSuppressorSupported();
  }

  private final Context context;
  private final AudioManager audioManager;
  private final WebRtcAudioRecord audioInput;
  private final WebRtcAudioTrack audioOutput;
  private final int inputSampleRate;
  private final int outputSampleRate;
  private final boolean useStereoInput;
  private final boolean useStereoOutput;

  private final Object nativeLock = new Object();
  private long nativeAudioDeviceModule;

  private JavaAudioDeviceModule(Context context, AudioManager audioManager,
      WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate,
      int outputSampleRate, boolean useStereoInput, boolean useStereoOutput) {
    this.context = context;
    this.audioManager = audioManager;
    this.audioInput = audioInput;
    this.audioOutput = audioOutput;
    this.inputSampleRate = inputSampleRate;
    this.outputSampleRate = outputSampleRate;
    this.useStereoInput = useStereoInput;
    this.useStereoOutput = useStereoOutput;
  }

  @Override
  public long getNativeAudioDeviceModulePointer() {
    synchronized (nativeLock) {
      if (nativeAudioDeviceModule == 0) {
        nativeAudioDeviceModule = nativeCreateAudioDeviceModule(context, audioManager, audioInput,
            audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
      }
      return nativeAudioDeviceModule;
    }
  }

  @Override
  public void release() {
    synchronized (nativeLock) {
      if (nativeAudioDeviceModule != 0) {
        JniCommon.nativeReleaseRef(nativeAudioDeviceModule);
        nativeAudioDeviceModule = 0;
      }
    }
  }

  @Override
  public void setSpeakerMute(boolean mute) {
    Logging.d(TAG, "setSpeakerMute: " + mute);
    audioOutput.setSpeakerMute(mute);
  }

  @Override
  public void setMicrophoneMute(boolean mute) {
    Logging.d(TAG, "setMicrophoneMute: " + mute);
    audioInput.setMicrophoneMute(mute);
  }

  /**
   * Start to prefer a specific {@link AudioDeviceInfo} device for recording. Typically this should
   * only be used if a client gives an explicit option for choosing a physical device to record
   * from. Otherwise, the best-matching device for the other parameters will be used. Calling this
   * after recording has started may cause a temporary interruption if the audio routing changes.
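   *
   * <p>A selection sketch using standard android.media APIs. Here {@code audioManager} and
   * {@code audioDeviceModule} are assumed to be available to the caller, and preferring a wired
   * headset is only an example:
   *
   * <pre>{@code
   * for (AudioDeviceInfo device : audioManager.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
   *   if (device.getType() == AudioDeviceInfo.TYPE_WIRED_HEADSET) {
   *     audioDeviceModule.setPreferredInputDevice(device);
   *     break;
   *   }
   * }
   * }</pre>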
   */
  @RequiresApi(Build.VERSION_CODES.M)
  public void setPreferredInputDevice(AudioDeviceInfo preferredInputDevice) {
    Logging.d(TAG, "setPreferredInputDevice: " + preferredInputDevice);
    audioInput.setPreferredDevice(preferredInputDevice);
  }

  private static native long nativeCreateAudioDeviceModule(Context context,
      AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput,
      int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput);
}