/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
#include "webrtc/system_wrappers/include/tick_util.h"

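// When ANDROID_LOG is defined, WEBRTC_TRACE is redirected to the Android
// logcat (tag "*WEBRTC*") instead of the regular WebRTC trace module.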
#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>

#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/include/trace.h"
#endif

namespace webrtc {

AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
    const int32_t id,
    const VideoRenderType videoRenderType,
    void* window,
    const bool fullscreen) :
    VideoRenderAndroid(id, videoRenderType, window, fullscreen),
    _javaRenderObj(NULL),
    _javaRenderClass(NULL) {
}

bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
  if (!g_jvm) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "RendererAndroid(): UseOpenGL2, no JVM set.");
    return false;
  }
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    // The thread is not yet attached; attach it to the JVM to get a JNIEnv.
    jint res = g_jvm->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env) {
      WEBRTC_TRACE(
          kTraceError,
          kTraceVideoRenderer,
          -1,
          "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
          res, env);
      return false;
    }
    isAttached = true;
  }

  // Get the renderer class.
  jclass javaRenderClassLocal =
      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
  if (!javaRenderClassLocal) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "%s: could not find ViEAndroidGLES20 class",
                 __FUNCTION__);
    return false;
  }

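  // From the JNI signature "(Ljava/lang/Object;)Z" used below, the Java-side
  // method is assumed to be declared roughly as:
  //   public static boolean UseOpenGL2(Object renderWindow);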
  // Get the method ID for UseOpenGL2.
  jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
                                                  "UseOpenGL2",
                                                  "(Ljava/lang/Object;)Z");
  if (cidUseOpenGL == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "%s: could not get UseOpenGL2 ID", __FUNCTION__);
    return false;
  }
  jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
                                              cidUseOpenGL, (jobject) window);

  // Detach this thread if it was attached.
  if (isAttached) {
    if (g_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }
  return res;
}

AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
               "AndroidNativeOpenGl2Renderer dtor");
  if (g_jvm) {
    // Get the JNI env for this thread.
    bool isAttached = false;
    JNIEnv* env = NULL;
    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
      // The thread is not yet attached; attach it to the JVM to get a JNIEnv.
      jint res = g_jvm->AttachCurrentThread(&env, NULL);
      if ((res < 0) || !env) {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: Could not attach thread to JVM (%d, %p)",
                     __FUNCTION__, res, env);
        env = NULL;
      } else {
        isAttached = true;
      }
    }

    // Only release the global references if we actually got a valid env.
    if (env) {
      env->DeleteGlobalRef(_javaRenderObj);
      env->DeleteGlobalRef(_javaRenderClass);
    }

    if (isAttached) {
      if (g_jvm->DetachCurrentThread() < 0) {
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "%s: Could not detach thread from JVM",
                     __FUNCTION__);
      }
    }
  }
}

int32_t AndroidNativeOpenGl2Renderer::Init() {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
  if (!g_jvm) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "(%s): Not a valid Java VM pointer.", __FUNCTION__);
    return -1;
  }
  if (!_ptrWindow) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                 "(%s): No window has been provided.", __FUNCTION__);
    return -1;
  }

  // Get the JNI env for this thread.
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    // The thread is not yet attached; attach it to the JVM to get a JNIEnv.
    jint res = g_jvm->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the ViEAndroidGLES20 class.
  jclass javaRenderClassLocal =
      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
  if (!javaRenderClassLocal) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not find ViEAndroidGLES20", __FUNCTION__);
    return -1;
  }

  // Create a global reference to the class (to tell JNI that
  // we are referencing it after this function has returned).
  _javaRenderClass =
      reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
  if (!_javaRenderClass) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not create Java ViEAndroidGLES20 class reference",
                 __FUNCTION__);
    return -1;
  }

  // Delete the local class ref; we only use the global ref.
  env->DeleteLocalRef(javaRenderClassLocal);

  // Create a global reference to the render window object (to tell JNI that
  // we are referencing it after this function has returned).
  _javaRenderObj = env->NewGlobalRef(_ptrWindow);
  if (!_javaRenderObj) {
    WEBRTC_TRACE(
        kTraceError,
        kTraceVideoRenderer,
        _id,
        "%s: could not create Java render object reference",
        __FUNCTION__);
    return -1;
  }

  // Detach this thread if it was attached.
  if (isAttached) {
    if (g_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
               __FUNCTION__);
  return 0;
}

AndroidStream*
AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
    int32_t streamId,
    int32_t zOrder,
    const float left,
    const float top,
    const float right,
    const float bottom,
    VideoRenderAndroid& renderer) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
               __FUNCTION__, streamId);
  AndroidNativeOpenGl2Channel* stream =
      new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
                                      _javaRenderObj);
  if (stream && stream->Init(zOrder, left, top, right, bottom) == 0) {
    return stream;
  }
  delete stream;
  return NULL;
}

AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
    uint32_t streamId,
    JavaVM* jvm,
    VideoRenderAndroid& renderer,
    jobject javaRenderObj) :
    _id(streamId),
    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
    _registerNativeCID(NULL), _deRegisterNativeCID(NULL),
    _openGLRenderer(streamId) {
}

AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
               "AndroidNativeOpenGl2Channel dtor");
  if (_jvm) {
    // Get the JNI env for this thread.
    bool isAttached = false;
    JNIEnv* env = NULL;
    if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
      // The thread is not yet attached; attach it to the JVM to get a JNIEnv.
      jint res = _jvm->AttachCurrentThread(&env, NULL);
      if ((res < 0) || !env) {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: Could not attach thread to JVM (%d, %p)",
                     __FUNCTION__, res, env);
        env = NULL;
      } else {
        isAttached = true;
      }
    }
    // Tell the Java render object to drop its reference to this native object.
    if (env && _deRegisterNativeCID) {
      env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
    }

    if (isAttached) {
      if (_jvm->DetachCurrentThread() < 0) {
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "%s: Could not detach thread from JVM",
                     __FUNCTION__);
      }
    }
  }

  delete &_renderCritSect;
}

int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
                                          const float left,
                                          const float top,
                                          const float right,
                                          const float bottom) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
  if (!_jvm) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Not a valid Java VM pointer", __FUNCTION__);
    return -1;
  }

  // Get the JNI env for this thread.
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    // The thread is not yet attached; attach it to the JVM to get a JNIEnv.
    jint res = _jvm->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  jclass javaRenderClass =
      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
  if (!javaRenderClass) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not find ViEAndroidGLES20", __FUNCTION__);
    return -1;
  }

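  // From the JNI signatures used below, the Java render object is assumed to
  // declare these instance methods roughly as:
  //   public void ReDraw();
  //   public void RegisterNativeObject(long nativeObject);
  //   public void DeRegisterNativeObject();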
  // Get the method ID for the ReDraw function.
  _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
  if (_redrawCid == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not get ReDraw ID", __FUNCTION__);
    return -1;
  }

  _registerNativeCID = env->GetMethodID(javaRenderClass,
                                        "RegisterNativeObject", "(J)V");
  if (_registerNativeCID == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not get RegisterNativeObject ID", __FUNCTION__);
    return -1;
  }

  _deRegisterNativeCID = env->GetMethodID(javaRenderClass,
                                          "DeRegisterNativeObject", "()V");
  if (_deRegisterNativeCID == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not get DeRegisterNativeObject ID",
                 __FUNCTION__);
    return -1;
  }

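  // From the JNI signatures registered below, the matching native methods on
  // the Java side are assumed to be declared roughly as:
  //   private native void DrawNative(long nativeObject);
  //   private native int CreateOpenGLNative(long nativeObject,
  //                                         int width, int height);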
  JNINativeMethod nativeFunctions[2] = {
    { "DrawNative",
      "(J)V",
      (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
    { "CreateOpenGLNative",
      "(JII)I",
      (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
  };
  if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
                 "%s: Registered native functions", __FUNCTION__);
  } else {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "%s: Failed to register native functions", __FUNCTION__);
    return -1;
  }

  // Hand this native object to the Java render object.
  env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);

  // Detach this thread if it was attached.
  if (isAttached) {
    if (_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
    return -1;
  }
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
  return 0;
}

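// Frame delivery path (as implemented below): RenderFrame() stores the
// incoming frame in _bufferToRender under the lock and calls
// _renderer.ReDraw(); DeliverFrame() then invokes the Java ReDraw() method,
// and the Java side eventually calls back into DrawNativeStatic()/DrawNative(),
// which renders the buffered frame with _openGLRenderer.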
int32_t AndroidNativeOpenGl2Channel::RenderFrame(const uint32_t /*streamId*/,
                                                 const VideoFrame& videoFrame) {
  //   WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
  _renderCritSect.Enter();
  _bufferToRender = videoFrame;
  _renderCritSect.Leave();
  _renderer.ReDraw();
  return 0;
}

/* Implements AndroidStream.
 * Calls the Java object and renders the buffer in _bufferToRender.
 */
void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
  //TickTime timeNow=TickTime::Now();

  // Draw the surface.
  jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);

  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
  // "%s: time to deliver %lld" ,__FUNCTION__,
  // (TickTime::Now()-timeNow).Milliseconds());
}

/*
 * JNI callback from the Java class. Called when the renderer wants to
 * render a frame. Called from the GLRenderThread.
 * Method:    DrawNative
 * Signature: (J)V
 */
void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
    JNIEnv * env, jobject, jlong context) {
  AndroidNativeOpenGl2Channel* renderChannel =
      reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
  renderChannel->DrawNative();
}

void AndroidNativeOpenGl2Channel::DrawNative() {
  _renderCritSect.Enter();
  _openGLRenderer.Render(_bufferToRender);
  _renderCritSect.Leave();
}

/*
 * JNI callback from the Java class. Called when the GLSurfaceView has
 * created a surface. Called from the GLRenderThread.
 * Method:    CreateOpenGLNativeStatic
 * Signature: (JII)I
 */
jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
    JNIEnv * env,
    jobject,
    jlong context,
    jint width,
    jint height) {
  AndroidNativeOpenGl2Channel* renderChannel =
      reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
  return renderChannel->CreateOpenGLNative(width, height);
}

jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
    int width, int height) {
  return _openGLRenderer.Setup(width, height);
}

}  // namespace webrtc