/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_render/android/video_render_android_native_opengl2.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"

#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>

#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/interface/trace.h"
#endif

namespace webrtc {

AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
    const int32_t id,
    const VideoRenderType videoRenderType,
    void* window,
    const bool fullscreen) :
    VideoRenderAndroid(id, videoRenderType, window, fullscreen),
    _javaRenderObj(NULL),
    _javaRenderClass(NULL) {
}

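// Asks the static ViEAndroidGLES20.UseOpenGL2() helper whether the given
// render window/surface supports OpenGL ES 2.0 rendering; the actual check
// is done on the Java side.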
bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
  if (!g_jvm) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "RendererAndroid():UseOpenGL No JVM set.");
    return false;
  }
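  // Get a JNIEnv for the calling thread, attaching it to the VM if it is not
  // attached already; if we attach it here, it is detached again further down.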
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    // try to attach the thread and get the env
    // Attach this thread to JVM
    jint res = g_jvm->AttachCurrentThread(&env, NULL);

    // Get the JNI env for this thread
    if ((res < 0) || !env) {
      WEBRTC_TRACE(
          kTraceError,
          kTraceVideoRenderer,
          -1,
          "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
          res, env);
      return false;
    }
    isAttached = true;
  }

  // get the renderer class
  jclass javaRenderClassLocal =
      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
  if (!javaRenderClassLocal) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "%s: could not find ViEAndroidGLES20 class",
                 __FUNCTION__);
    return false;
  }

  // get the method ID for UseOpenGL2
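  // "(Ljava/lang/Object;)Z" is the JNI descriptor for a method that takes a
  // java.lang.Object (the render window passed in as |window|) and returns
  // a boolean.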
  jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
                                                  "UseOpenGL2",
                                                  "(Ljava/lang/Object;)Z");
  if (cidUseOpenGL == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "%s: could not get UseOpenGL2 ID", __FUNCTION__);
    return false;
  }
  jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
                                              cidUseOpenGL, (jobject) window);

  // Detach this thread if it was attached
  if (isAttached) {
    if (g_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }
  return res;
}

AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
               "AndroidNativeOpenGl2Renderer dtor");
  if (g_jvm) {
    // get the JNI env for this thread
    bool isAttached = false;
    JNIEnv* env = NULL;
    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
      // try to attach the thread and get the env
      // Attach this thread to JVM
      jint res = g_jvm->AttachCurrentThread(&env, NULL);

      // Get the JNI env for this thread
      if ((res < 0) || !env) {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: Could not attach thread to JVM (%d, %p)",
                     __FUNCTION__, res, env);
        env = NULL;
      } else {
        isAttached = true;
      }
    }
    // |env| may be NULL here if attaching the thread failed above; only
    // release the global references with a valid environment.
    if (env) {
      env->DeleteGlobalRef(_javaRenderObj);
      env->DeleteGlobalRef(_javaRenderClass);
    }

    if (isAttached) {
      if (g_jvm->DetachCurrentThread() < 0) {
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "%s: Could not detach thread from JVM",
                     __FUNCTION__);
      }
    }
  }
}

int32_t AndroidNativeOpenGl2Renderer::Init() {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
  if (!g_jvm) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "(%s): Not a valid Java VM pointer.", __FUNCTION__);
    return -1;
  }
  if (!_ptrWindow) {
    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                 "(%s): No window has been provided.", __FUNCTION__);
    return -1;
  }

  // get the JNI env for this thread
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    // try to attach the thread and get the env
    // Attach this thread to JVM
    jint res = g_jvm->AttachCurrentThread(&env, NULL);

    // Get the JNI env for this thread
    if ((res < 0) || !env) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  // get the ViEAndroidGLES20 class
  jclass javaRenderClassLocal =
      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
  if (!javaRenderClassLocal) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not find ViEAndroidGLES20", __FUNCTION__);
    return -1;
  }

  // create a global reference to the class (to tell JNI that
  // we are referencing it after this function has returned)
  _javaRenderClass =
      reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
  if (!_javaRenderClass) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not create Java ViEAndroidGLES20 class reference",
                 __FUNCTION__);
    return -1;
  }

  // Delete local class ref, we only use the global ref
  env->DeleteLocalRef(javaRenderClassLocal);

  // create a reference to the object (to tell JNI that we are referencing it
  // after this function has returned)
  _javaRenderObj = env->NewGlobalRef(_ptrWindow);
  if (!_javaRenderObj) {
    WEBRTC_TRACE(
        kTraceError,
        kTraceVideoRenderer,
        _id,
        "%s: could not create Java SurfaceRender object reference",
        __FUNCTION__);
    return -1;
  }

  // Detach this thread if it was attached
  if (isAttached) {
    if (g_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
               __FUNCTION__);
  return 0;
}
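
// Creates and initializes one render channel (stream) that renders into the
// shared ViEAndroidGLES20 object; returns NULL if initialization fails.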
AndroidStream*
AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
    int32_t streamId,
    int32_t zOrder,
    const float left,
    const float top,
    const float right,
    const float bottom,
    VideoRenderAndroid& renderer) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
               __FUNCTION__, streamId);
  AndroidNativeOpenGl2Channel* stream =
      new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
                                      _javaRenderObj);
  if (stream && stream->Init(zOrder, left, top, right, bottom) == 0) {
    return stream;
  } else {
    delete stream;
  }
  return NULL;
}

AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
    uint32_t streamId,
    JavaVM* jvm,
    VideoRenderAndroid& renderer,
    jobject javaRenderObj) :
    _id(streamId),
    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
    _registerNativeCID(NULL), _deRegisterNativeCID(NULL),
    _openGLRenderer(streamId) {
}

AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
               "AndroidNativeOpenGl2Channel dtor");
  if (_jvm) {
    // get the JNI env for this thread
    bool isAttached = false;
    JNIEnv* env = NULL;
    if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
      // try to attach the thread and get the env
      // Attach this thread to JVM
      jint res = _jvm->AttachCurrentThread(&env, NULL);

      // Get the JNI env for this thread
      if ((res < 0) || !env) {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "%s: Could not attach thread to JVM (%d, %p)",
                     __FUNCTION__, res, env);
        env = NULL;
      } else {
        isAttached = true;
      }
    }
    if (env && _deRegisterNativeCID) {
      env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
    }

    if (isAttached) {
      if (_jvm->DetachCurrentThread() < 0) {
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "%s: Could not detach thread from JVM",
                     __FUNCTION__);
      }
    }
  }

  delete &_renderCritSect;
}

int32_t AndroidNativeOpenGl2Channel::Init(int32_t zOrder,
                                          const float left,
                                          const float top,
                                          const float right,
                                          const float bottom) {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
  if (!_jvm) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Not a valid Java VM pointer", __FUNCTION__);
    return -1;
  }

  // get the JNI env for this thread
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
    // try to attach the thread and get the env
    // Attach this thread to JVM
    jint res = _jvm->AttachCurrentThread(&env, NULL);

    // Get the JNI env for this thread
    if ((res < 0) || !env) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  jclass javaRenderClass =
      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
  if (!javaRenderClass) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not find ViEAndroidGLES20", __FUNCTION__);
    return -1;
  }

  // get the method ID for the ReDraw function
  _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
  if (_redrawCid == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not get ReDraw ID", __FUNCTION__);
    return -1;
  }

  _registerNativeCID = env->GetMethodID(javaRenderClass,
                                        "RegisterNativeObject", "(J)V");
  if (_registerNativeCID == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not get RegisterNativeObject ID", __FUNCTION__);
    return -1;
  }

  _deRegisterNativeCID = env->GetMethodID(javaRenderClass,
                                          "DeRegisterNativeObject", "()V");
  if (_deRegisterNativeCID == NULL) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: could not get DeRegisterNativeObject ID",
                 __FUNCTION__);
    return -1;
  }

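  // Based on the method IDs looked up above and the native methods registered
  // below, the Java side of ViEAndroidGLES20 is assumed to look roughly like
  // this (parameter names are illustrative, not taken from the actual class):
  //
  //   public class ViEAndroidGLES20 extends GLSurfaceView {
  //     public static boolean UseOpenGL2(Object renderWindow) { ... }
  //     public void ReDraw() { ... }
  //     public void RegisterNativeObject(long nativeObject) { ... }
  //     public void DeRegisterNativeObject() { ... }
  //     private native void DrawNative(long nativeObject);
  //     private native int CreateOpenGLNative(long nativeObject,
  //                                           int width, int height);
  //   }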
  JNINativeMethod nativeFunctions[2] = {
    { "DrawNative",
      "(J)V",
      (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic },
    { "CreateOpenGLNative",
      "(JII)I",
      (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
  };
  if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
                 "%s: Registered native functions", __FUNCTION__);
  } else {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
                 "%s: Failed to register native functions", __FUNCTION__);
    return -1;
  }

  env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);

  // Detach this thread if it was attached
  if (isAttached) {
    if (_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
    return -1;
  }
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
               "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
  return 0;
}

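// Frame delivery path: RenderFrame() runs on the incoming video thread and
// swaps the new frame into _bufferToRender under _renderCritSect, then asks
// the renderer to schedule a redraw. DeliverFrame() later invokes the Java
// ReDraw() method, and the Java GL thread calls back through
// DrawNativeStatic()/DrawNative() to render _bufferToRender.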
int32_t AndroidNativeOpenGl2Channel::RenderFrame(
    const uint32_t /*streamId*/,
    I420VideoFrame& videoFrame) {
  //   WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
  _renderCritSect.Enter();
  _bufferToRender.SwapFrame(&videoFrame);
  _renderCritSect.Leave();
  _renderer.ReDraw();
  return 0;
}

/* Implements AndroidStream.
 * Calls the Java object and renders the buffer in _bufferToRender.
 */
void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
  //TickTime timeNow=TickTime::Now();

  //Draw the Surface
  jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);

  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
  // "%s: time to deliver %lld" ,__FUNCTION__,
  // (TickTime::Now()-timeNow).Milliseconds());
}

/*
 * JNI callback from the Java class. Called when the renderer
 * wants to render a frame. Called from the GLRenderThread.
 * Method:    DrawNative
 * Signature: (J)V
 */
void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
    JNIEnv * env, jobject, jlong context) {
  AndroidNativeOpenGl2Channel* renderChannel =
      reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
  renderChannel->DrawNative();
}

void AndroidNativeOpenGl2Channel::DrawNative() {
  _renderCritSect.Enter();
  _openGLRenderer.Render(_bufferToRender);
  _renderCritSect.Leave();
}

/*
 * JNI callback from the Java class. Called when the GLSurfaceView
 * has created a surface. Called from the GLRenderThread.
 * Method:    CreateOpenGLNativeStatic
 * Signature: (JII)I
 */
jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
    JNIEnv * env,
    jobject,
    jlong context,
    jint width,
    jint height) {
  AndroidNativeOpenGl2Channel* renderChannel =
      reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
  return renderChannel->CreateOpenGLNative(width, height);
}

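// Runs on the GL thread with the dimensions of the newly created surface;
// _openGLRenderer.Setup() is expected to (re)configure the OpenGL ES 2.0
// state (e.g. the viewport) for that size.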
jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
    int width, int height) {
  return _openGLRenderer.Setup(width, height);
}

}  // namespace webrtc