/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_render/android/video_render_android_surface_view.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"

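// When ANDROID_LOG is defined, WEBRTC_TRACE output is redirected to the
// Android logcat under the "*WEBRTC*" tag instead of the WebRTC trace module.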
#ifdef ANDROID_LOG
#include <android/log.h>
#include <stdio.h>

#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "webrtc/system_wrappers/interface/trace.h"
#endif

namespace webrtc {

AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
    const int32_t id,
    const VideoRenderType videoRenderType,
    void* window,
    const bool fullscreen) :
    VideoRenderAndroid(id, videoRenderType, window, fullscreen),
    _javaRenderObj(NULL),
    _javaRenderClass(NULL) {
}

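// Releases the global JNI references held by this renderer. Runs on whatever
// thread destroys the renderer, attaching it to the JVM first if needed.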
AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
               "AndroidSurfaceViewRenderer dtor");
  if (g_jvm) {
    // Get the JNI env for this thread.
    bool isAttached = false;
    JNIEnv* env = NULL;
    if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
      // The thread is not yet attached; attach it to the JVM to get an env.
      jint res = g_jvm->AttachCurrentThread(&env, NULL);
      if ((res < 0) || !env) {
        WEBRTC_TRACE(kTraceError,
                     kTraceVideoRenderer,
                     _id,
                     "%s: Could not attach thread to JVM (%d, %p)",
                     __FUNCTION__,
                     res,
                     env);
        env = NULL;
      } else {
        isAttached = true;
      }
    }

    // Only release the global references if we actually have a valid env.
    if (env) {
      env->DeleteGlobalRef(_javaRenderObj);
      env->DeleteGlobalRef(_javaRenderClass);
    }

    if (isAttached) {
      if (g_jvm->DetachCurrentThread() < 0) {
        WEBRTC_TRACE(kTraceWarning,
                     kTraceVideoRenderer,
                     _id,
                     "%s: Could not detach thread from JVM",
                     __FUNCTION__);
      }
    }
  }
}

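// Looks up the Java ViESurfaceRenderer class, constructs an instance wrapping
// the supplied SurfaceView window, and caches global references to both so
// they can be used after this call returns.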
int32_t AndroidSurfaceViewRenderer::Init() {
  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
  if (!g_jvm) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "(%s): Not a valid Java VM pointer.",
                 __FUNCTION__);
    return -1;
  }
  if (!_ptrWindow) {
    WEBRTC_TRACE(kTraceWarning,
                 kTraceVideoRenderer,
                 _id,
                 "(%s): No window has been provided.",
                 __FUNCTION__);
    return -1;
  }

  // Get the JNI env for this thread.
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
    // The thread is not yet attached; attach it to the JVM to get an env.
    jint res = g_jvm->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env) {
      WEBRTC_TRACE(kTraceError,
                   kTraceVideoRenderer,
                   _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__,
                   res,
                   env);
      return -1;
    }
    isAttached = true;
  }

  // Get the ViESurfaceRenderer class.
  jclass javaRenderClassLocal =
      env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
  if (!javaRenderClassLocal) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not find ViESurfaceRenderer",
                 __FUNCTION__);
    return -1;
  }

  // Create a global reference to the class (to tell JNI that we are
  // referencing it after this function has returned).
  _javaRenderClass =
      reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
  if (!_javaRenderClass) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not create Java ViESurfaceRenderer class reference",
                 __FUNCTION__);
    return -1;
  }

  // Delete the local class ref; we only use the global ref from here on.
  env->DeleteLocalRef(javaRenderClassLocal);

  // Get the method ID for the constructor.
  jmethodID cid = env->GetMethodID(_javaRenderClass,
                                   "<init>",
                                   "(Landroid/view/SurfaceView;)V");
  if (cid == NULL) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not get constructor ID",
                 __FUNCTION__);
    return -1; /* exception thrown */
  }

  // Construct the object.
  jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
                                              cid,
                                              _ptrWindow);
  if (!javaRenderObjLocal) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not create Java Render",
                 __FUNCTION__);
    return -1;
  }

  // Create a reference to the object (to tell JNI that we are referencing it
  // after this function has returned).
  _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
  if (!_javaRenderObj) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not create Java SurfaceRender object reference",
                 __FUNCTION__);
    return -1;
  }

  // Detach this thread if it was attached.
  if (isAttached) {
    if (g_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning,
                   kTraceVideoRenderer,
                   _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
  return 0;
}

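// Creates a per-stream render channel. All channels share the single Java
// ViESurfaceRenderer object owned by this renderer.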
AndroidStream* AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
    int32_t streamId,
    int32_t zOrder,
    const float left,
    const float top,
    const float right,
    const float bottom,
    VideoRenderAndroid& renderer) {
  WEBRTC_TRACE(kTraceDebug,
               kTraceVideoRenderer,
               _id,
               "%s: Id %d",
               __FUNCTION__,
               streamId);
  AndroidSurfaceViewChannel* stream =
      new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
  if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
    return stream;
  else
    delete stream;
  return NULL;
}

AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
    uint32_t streamId,
    JavaVM* jvm,
    VideoRenderAndroid& renderer,
    jobject javaRenderObj) :
    _id(streamId),
    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
    _renderer(renderer),
    _jvm(jvm),
    _javaRenderObj(javaRenderObj),
#ifndef ANDROID_NDK_8_OR_ABOVE
    _javaByteBufferObj(NULL),
    _directBuffer(NULL),
#endif
    _bitmapWidth(0),
    _bitmapHeight(0) {
}

AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
  WEBRTC_TRACE(kTraceInfo,
               kTraceVideoRenderer,
               _id,
               "AndroidSurfaceViewChannel dtor");
  delete &_renderCritSect;
  if (_jvm) {
    // Get the JNI env for this thread.
    bool isAttached = false;
    JNIEnv* env = NULL;
    if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
      // The thread is not yet attached; attach it to the JVM to get an env.
      jint res = _jvm->AttachCurrentThread(&env, NULL);
      if ((res < 0) || !env) {
        WEBRTC_TRACE(kTraceError,
                     kTraceVideoRenderer,
                     _id,
                     "%s: Could not attach thread to JVM (%d, %p)",
                     __FUNCTION__,
                     res,
                     env);
        env = NULL;
      } else {
        isAttached = true;
      }
    }

    // Only release the global reference if we actually have a valid env.
    if (env) {
      env->DeleteGlobalRef(_javaByteBufferObj);
    }
    if (isAttached) {
      if (_jvm->DetachCurrentThread() < 0) {
        WEBRTC_TRACE(kTraceWarning,
                     kTraceVideoRenderer,
                     _id,
                     "%s: Could not detach thread from JVM",
                     __FUNCTION__);
      }
    }
  }
}

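// Validates the normalized render rectangle, caches the Java method IDs used
// for rendering (CreateByteBuffer, DrawByteBuffer, SetCoordinates), and pushes
// the rectangle to the Java renderer.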
int32_t AndroidSurfaceViewChannel::Init(
    int32_t /*zOrder*/,
    const float left,
    const float top,
    const float right,
    const float bottom) {
  WEBRTC_TRACE(kTraceDebug,
               kTraceVideoRenderer,
               _id,
               "%s: AndroidSurfaceViewChannel",
               __FUNCTION__);
  if (!_jvm) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: Not a valid Java VM pointer",
                 __FUNCTION__);
    return -1;
  }

  // All coordinates are expected to be normalized to [0, 1].
  if ((top > 1 || top < 0) ||
      (right > 1 || right < 0) ||
      (bottom > 1 || bottom < 0) ||
      (left > 1 || left < 0)) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                 "%s: Wrong coordinates", __FUNCTION__);
    return -1;
  }

  // Get the JNI env for this thread.
  bool isAttached = false;
  JNIEnv* env = NULL;
  if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
    // The thread is not yet attached; attach it to the JVM to get an env.
    jint res = _jvm->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env) {
      WEBRTC_TRACE(kTraceError,
                   kTraceVideoRenderer,
                   _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__,
                   res,
                   env);
      return -1;
    }
    isAttached = true;
  }

  jclass javaRenderClass =
      env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
  if (!javaRenderClass) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not find ViESurfaceRenderer",
                 __FUNCTION__);
    return -1;
  }

  // Get the method ID for the CreateByteBuffer function.
  _createByteBufferCid =
      env->GetMethodID(javaRenderClass,
                       "CreateByteBuffer",
                       "(II)Ljava/nio/ByteBuffer;");
  if (_createByteBufferCid == NULL) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not get CreateByteBuffer ID",
                 __FUNCTION__);
    return -1; /* exception thrown */
  }

  // Get the method ID for the DrawByteBuffer function.
  _drawByteBufferCid = env->GetMethodID(javaRenderClass,
                                        "DrawByteBuffer",
                                        "()V");
  if (_drawByteBufferCid == NULL) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not get DrawByteBuffer ID",
                 __FUNCTION__);
    return -1; /* exception thrown */
  }

  // Get the method ID for the SetCoordinates function.
  _setCoordinatesCid = env->GetMethodID(javaRenderClass,
                                        "SetCoordinates",
                                        "(FFFF)V");
  if (_setCoordinatesCid == NULL) {
    WEBRTC_TRACE(kTraceError,
                 kTraceVideoRenderer,
                 _id,
                 "%s: could not get SetCoordinates ID",
                 __FUNCTION__);
    return -1; /* exception thrown */
  }

  env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
                      left, top, right, bottom);

  // Detach this thread if it was attached.
  if (isAttached) {
    if (_jvm->DetachCurrentThread() < 0) {
      WEBRTC_TRACE(kTraceWarning,
                   kTraceVideoRenderer,
                   _id,
                   "%s: Could not detach thread from JVM",
                   __FUNCTION__);
    }
  }

  WEBRTC_TRACE(kTraceDebug,
               kTraceVideoRenderer,
               _id,
               "%s: AndroidSurfaceViewChannel done",
               __FUNCTION__);
  return 0;
}

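// Called on the thread delivering incoming video frames. Swaps the new frame
// into _bufferToRender under the critical section and asks the renderer to
// redraw; the frame is drawn later when the renderer calls DeliverFrame().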
int32_t AndroidSurfaceViewChannel::RenderFrame(
    const uint32_t /*streamId*/,
    I420VideoFrame& videoFrame) {
  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
  _renderCritSect.Enter();
  _bufferToRender.SwapFrame(&videoFrame);
  _renderCritSect.Leave();
  _renderer.ReDraw();
  return 0;
}

// Implements AndroidStream. Calls into the Java object to render the buffer
// currently held in _bufferToRender.
void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
  _renderCritSect.Enter();

  if (_bitmapWidth != _bufferToRender.width() ||
      _bitmapHeight != _bufferToRender.height()) {
    // The frame size changed: release the old Java ByteBuffer (if any) and ask
    // the Java renderer to create a new one matching the new dimensions.
    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
                 "%d", __FUNCTION__,
                 _bufferToRender.width(), _bufferToRender.height());
    if (_javaByteBufferObj) {
      jniEnv->DeleteGlobalRef(_javaByteBufferObj);
      _javaByteBufferObj = NULL;
      _directBuffer = NULL;
    }

    jobject javaByteBufferObj =
        jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
                                 _bufferToRender.width(),
                                 _bufferToRender.height());
    _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
    if (!_javaByteBufferObj) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
                   "create Java ByteBuffer object reference", __FUNCTION__);
      _renderCritSect.Leave();
      return;
    } else {
      _directBuffer = static_cast<unsigned char*>
          (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
      _bitmapWidth = _bufferToRender.width();
      _bitmapHeight = _bufferToRender.height();
    }
  }

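  // Convert the pending I420 frame to RGB565 directly into the memory backing
  // the Java ByteBuffer before asking the Java side to draw it.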
  if (_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
    const int conversionResult =
        ConvertFromI420(_bufferToRender, kRGB565, 0, _directBuffer);

    if (conversionResult < 0) {
      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
                   " failed.", __FUNCTION__);
      _renderCritSect.Leave();
      return;
    }
  }
  _renderCritSect.Leave();
  // Draw the Surface.
  jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
}

}  // namespace webrtc