/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_
#define MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_

#include <jni.h>

#include <memory>

#include "modules/audio_device/android/audio_common.h"
#include "modules/audio_device/android/audio_manager.h"
#include "modules/audio_device/audio_device_generic.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "modules/utility/include/helpers_android.h"
#include "modules/utility/include/jvm_android.h"
#include "rtc_base/thread_checker.h"

namespace webrtc {

// Implements 16-bit mono PCM audio output support for Android using the Java
// AudioTrack interface. Most of the work is done by its Java counterpart in
// WebRtcAudioTrack.java. This class is created and lives on a thread in
// C++-land, but decoded audio buffers are requested on a high-priority
// thread managed by the Java class.
//
// An instance must be created and destroyed on one and the same thread.
// All public methods must also be called on the same thread. A thread checker
// will RTC_DCHECK if any method is called on an invalid thread.
//
// This class uses JvmThreadConnector to attach to a Java VM if needed
// and detach when the object goes out of scope. Additional thread checking
// guarantees that no other (possibly non-attached) thread is used.
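//
// Example usage (a minimal sketch, not taken from the implementation; it
// assumes an AudioManager and an AudioDeviceBuffer that are normally owned
// by AudioDeviceModuleImpl, and that the int32_t methods follow the usual
// 0 == success convention):
//
//   AudioTrackJni audio_track(audio_manager);
//   audio_track.AttachAudioBuffer(audio_device_buffer);
//   if (audio_track.Init() != 0 || audio_track.InitPlayout() != 0) {
//     // Handle the error.
//   }
//   audio_track.StartPlayout();  // OnGetPlayoutData() now runs on the
//                                // Java-managed high-priority thread.
//   ...
//   audio_track.StopPlayout();
//   audio_track.Terminate();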
class AudioTrackJni {
 public:
  // Wraps the Java specific parts of the AudioTrackJni into one helper class.
  class JavaAudioTrack {
   public:
    JavaAudioTrack(NativeRegistration* native_registration,
                   std::unique_ptr<GlobalRef> audio_track);
    ~JavaAudioTrack();

    bool InitPlayout(int sample_rate, int channels);
    bool StartPlayout();
    bool StopPlayout();
    bool SetStreamVolume(int volume);
    int GetStreamMaxVolume();
    int GetStreamVolume();

   private:
    std::unique_ptr<GlobalRef> audio_track_;
    jmethodID init_playout_;
    jmethodID start_playout_;
    jmethodID stop_playout_;
    jmethodID set_stream_volume_;
    jmethodID get_stream_max_volume_;
    jmethodID get_stream_volume_;
    jmethodID get_buffer_size_in_frames_;
  };
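
  // Illustrative sketch (not the actual implementation) of how a wrapper call
  // is dispatched: the jmethodIDs above are assumed to be looked up through
  // |native_registration| in the constructor, and GlobalRef is assumed to
  // expose Call*Method() forwarders as declared in jvm_android.h:
  //
  //   bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
  //     return audio_track_->CallBooleanMethod(start_playout_);
  //   }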

  explicit AudioTrackJni(AudioManager* audio_manager);
  ~AudioTrackJni();

  int32_t Init();
  int32_t Terminate();

  int32_t InitPlayout();
  bool PlayoutIsInitialized() const { return initialized_; }

  int32_t StartPlayout();
  int32_t StopPlayout();
  bool Playing() const { return playing_; }

  int SpeakerVolumeIsAvailable(bool& available);
  int SetSpeakerVolume(uint32_t volume);
  int SpeakerVolume(uint32_t& volume) const;
  int MaxSpeakerVolume(uint32_t& max_volume) const;
  int MinSpeakerVolume(uint32_t& min_volume) const;

  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);

 private:
  // Called from Java side so we can cache the address of the Java-managed
  // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
  // is also stored in |direct_buffer_capacity_in_bytes_|.
  // Called on the same thread as the creating thread.
  static void JNICALL CacheDirectBufferAddress(JNIEnv* env,
                                               jobject obj,
                                               jobject byte_buffer,
                                               jlong nativeAudioTrack);
  void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer);
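
  // Illustrative sketch (not the actual implementation) of the trampoline
  // pattern used here: the static entry point recovers the C++ instance from
  // the |nativeAudioTrack| handle, and the instance method caches the buffer
  // details with standard JNI calls:
  //
  //   static void JNICALL CacheDirectBufferAddress(
  //       JNIEnv* env, jobject obj, jobject byte_buffer,
  //       jlong nativeAudioTrack) {
  //     reinterpret_cast<AudioTrackJni*>(nativeAudioTrack)
  //         ->OnCacheDirectBufferAddress(env, byte_buffer);
  //   }
  //
  //   void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env,
  //                                                  jobject byte_buffer) {
  //     direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
  //     direct_buffer_capacity_in_bytes_ = static_cast<size_t>(
  //         env->GetDirectBufferCapacity(byte_buffer));
  //   }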

  // Called periodically by the Java-based WebRtcAudioTrack object when
  // playout has started. Each call indicates that |length| new bytes should
  // be written to the memory area |direct_buffer_address_| for playout.
  // This method is called on a high-priority thread from Java. The name of
  // the thread is 'AudioTrackThread'.
  static void JNICALL GetPlayoutData(JNIEnv* env,
                                     jobject obj,
                                     jint length,
                                     jlong nativeAudioTrack);
  void OnGetPlayoutData(size_t length);
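
  // Illustrative sketch (not the actual implementation) of the playout path;
  // it assumes the AudioDeviceBuffer pull interface
  // (RequestPlayoutData()/GetPlayoutData()) used by the other audio backends:
  //
  //   void AudioTrackJni::OnGetPlayoutData(size_t length) {
  //     RTC_DCHECK(thread_checker_java_.CalledOnValidThread());
  //     RTC_DCHECK_EQ(length, 2 * frames_per_buffer_);  // 2 bytes per frame.
  //     // Ask WebRTC for 10 ms of decoded audio and copy it into the shared
  //     // direct buffer that the Java thread then writes to the AudioTrack.
  //     audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
  //     audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
  //   }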

  // Stores thread ID in constructor.
  rtc::ThreadChecker thread_checker_;

  // Stores thread ID in first call to OnGetPlayoutData() from high-priority
  // thread in Java. Detached during construction of this object.
  rtc::ThreadChecker thread_checker_java_;

  // Calls JavaVM::AttachCurrentThread() if this thread is not attached at
  // construction.
  // Also ensures that DetachCurrentThread() is called at destruction.
  JvmThreadConnector attach_thread_if_needed_;

  // Wraps the JNI interface pointer and methods associated with it.
  std::unique_ptr<JNIEnvironment> j_environment_;

  // Contains factory method for creating the Java object.
  std::unique_ptr<NativeRegistration> j_native_registration_;

  // Wraps the Java specific parts of the AudioTrackJni class.
  std::unique_ptr<AudioTrackJni::JavaAudioTrack> j_audio_track_;

  // Contains audio parameters provided to this class at construction by the
  // AudioManager.
  const AudioParameters audio_parameters_;

  // Cached copy of address to direct audio buffer owned by |j_audio_track_|.
  void* direct_buffer_address_;

  // Number of bytes in the direct audio buffer owned by |j_audio_track_|.
  size_t direct_buffer_capacity_in_bytes_;

  // Number of audio frames per audio buffer. Each audio frame corresponds to
  // one sample of PCM mono data at 16 bits per sample. Hence, each audio
  // frame contains 2 bytes (given that the Java layer only supports mono).
  // Example: 480 for 48000 Hz or 441 for 44100 Hz.
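  // Sketch of the arithmetic implied by the examples above (10 ms buffers,
  // mono, 16 bits per sample); the names below are purely illustrative:
  //   frames_per_buffer = sample_rate / 100;       // 48000 / 100 = 480
  //   bytes_per_buffer  = 2 * frames_per_buffer;   // 480 * 2 = 960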
  size_t frames_per_buffer_;

  bool initialized_;

  bool playing_;

  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
  // AudioDeviceModuleImpl class, which is created by
  // AudioDeviceModule::Create(). The AudioDeviceBuffer is a member of the
  // AudioDeviceModuleImpl instance and therefore outlives this object.
  AudioDeviceBuffer* audio_device_buffer_;
};

}  // namespace webrtc

#endif  // MODULES_AUDIO_DEVICE_ANDROID_AUDIO_TRACK_JNI_H_