1/*
2 * libjingle
3 * Copyright 2014, Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 *  1. Redistributions of source code must retain the above copyright notice,
9 *     this list of conditions and the following disclaimer.
10 *  2. Redistributions in binary form must reproduce the above copyright notice,
11 *     this list of conditions and the following disclaimer in the documentation
12 *     and/or other materials provided with the distribution.
13 *  3. The name of the author may not be used to endorse or promote products
14 *     derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28package org.webrtc;
29
30import java.nio.ByteBuffer;
31import java.nio.ByteOrder;
32import java.nio.FloatBuffer;
33import java.util.ArrayList;
34import java.util.concurrent.CountDownLatch;
35import java.util.concurrent.LinkedBlockingQueue;
36
37import javax.microedition.khronos.egl.EGLConfig;
38import javax.microedition.khronos.opengles.GL10;
39
40import android.graphics.SurfaceTexture;
41import android.opengl.EGL14;
42import android.opengl.EGLContext;
43import android.opengl.GLES11Ext;
44import android.opengl.GLES20;
45import android.opengl.GLSurfaceView;
46import android.util.Log;
47
48import org.webrtc.VideoRenderer.I420Frame;
49
50/**
51 * Efficiently renders YUV frames using the GPU for CSC.
52 * Clients will want first to call setView() to pass GLSurfaceView
53 * and then for each video stream either create instance of VideoRenderer using
54 * createGui() call or VideoRenderer.Callbacks interface using create() call.
55 * Only one instance of the class can be created.
56 */
57public class VideoRendererGui implements GLSurfaceView.Renderer {
58  private static VideoRendererGui instance = null;
59  private static final String TAG = "VideoRendererGui";
60  private GLSurfaceView surface;
61  private static EGLContext eglContext = null;
62  // Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
63  // If true then for every newly created yuv image renderer createTexture()
64  // should be called. The variable is accessed on multiple threads and
65  // all accesses are synchronized on yuvImageRenderers' object lock.
66  private boolean onSurfaceCreatedCalled;
67  private int screenWidth;
68  private int screenHeight;
69  // List of yuv renderers.
70  private ArrayList<YuvImageRenderer> yuvImageRenderers;
71  private int yuvProgram;
72  private int oesProgram;
73  // Types of video scaling:
74  // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
75  //    maintaining the aspect ratio (black borders may be displayed).
76  // SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
77  //    maintaining the aspect ratio. Some portion of the video frame may be
78  //    clipped.
79  // SCALE_FILL - video frame is scaled to to fill the size of the view. Video
80  //    aspect ratio is changed if necessary.
81  private static enum ScalingType
82      { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_FILL };
83
84  private final String VERTEX_SHADER_STRING =
85      "varying vec2 interp_tc;\n" +
86      "attribute vec4 in_pos;\n" +
87      "attribute vec2 in_tc;\n" +
88      "\n" +
89      "void main() {\n" +
90      "  gl_Position = in_pos;\n" +
91      "  interp_tc = in_tc;\n" +
92      "}\n";
93
94  private final String YUV_FRAGMENT_SHADER_STRING =
95      "precision mediump float;\n" +
96      "varying vec2 interp_tc;\n" +
97      "\n" +
98      "uniform sampler2D y_tex;\n" +
99      "uniform sampler2D u_tex;\n" +
100      "uniform sampler2D v_tex;\n" +
101      "\n" +
102      "void main() {\n" +
103      // CSC according to http://www.fourcc.org/fccyvrgb.php
104      "  float y = texture2D(y_tex, interp_tc).r;\n" +
105      "  float u = texture2D(u_tex, interp_tc).r - 0.5;\n" +
106      "  float v = texture2D(v_tex, interp_tc).r - 0.5;\n" +
107      "  gl_FragColor = vec4(y + 1.403 * v, " +
108      "                      y - 0.344 * u - 0.714 * v, " +
109      "                      y + 1.77 * u, 1);\n" +
110      "}\n";
111
112
113  private static final String OES_FRAGMENT_SHADER_STRING =
114      "#extension GL_OES_EGL_image_external : require\n" +
115      "precision mediump float;\n" +
116      "varying vec2 interp_tc;\n" +
117      "\n" +
118      "uniform samplerExternalOES oes_tex;\n" +
119      "\n" +
120      "void main() {\n" +
121      "  gl_FragColor = texture2D(oes_tex, interp_tc);\n" +
122      "}\n";
123
124
125  private VideoRendererGui(GLSurfaceView surface) {
126    this.surface = surface;
127    // Create an OpenGL ES 2.0 context.
128    surface.setPreserveEGLContextOnPause(true);
129    surface.setEGLContextClientVersion(2);
130    surface.setRenderer(this);
131    surface.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
132
133    yuvImageRenderers = new ArrayList<YuvImageRenderer>();
134  }
135
136  // Poor-man's assert(): die with |msg| unless |condition| is true.
137  private static void abortUnless(boolean condition, String msg) {
138    if (!condition) {
139      throw new RuntimeException(msg);
140    }
141  }
142
143  // Assert that no OpenGL ES 2.0 error has been raised.
144  private static void checkNoGLES2Error() {
145    int error = GLES20.glGetError();
146    abortUnless(error == GLES20.GL_NO_ERROR, "GLES20 error: " + error);
147  }
148
149  // Wrap a float[] in a direct FloatBuffer using native byte order.
150  private static FloatBuffer directNativeFloatBuffer(float[] array) {
151    FloatBuffer buffer = ByteBuffer.allocateDirect(array.length * 4).order(
152        ByteOrder.nativeOrder()).asFloatBuffer();
153    buffer.put(array);
154    buffer.flip();
155    return buffer;
156  }
157
158  private int loadShader(int shaderType, String source) {
159    int[] result = new int[] {
160        GLES20.GL_FALSE
161    };
162    int shader = GLES20.glCreateShader(shaderType);
163    GLES20.glShaderSource(shader, source);
164    GLES20.glCompileShader(shader);
165    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, result, 0);
166    if (result[0] != GLES20.GL_TRUE) {
167      Log.e(TAG, "Could not compile shader " + shaderType + ":" +
168          GLES20.glGetShaderInfoLog(shader));
169      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
170    }
171    checkNoGLES2Error();
172    return shader;
173}
174
175
176  private int createProgram(String vertexSource, String fragmentSource) {
177    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
178    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
179    int program = GLES20.glCreateProgram();
180    if (program == 0) {
181      throw new RuntimeException("Could not create program");
182    }
183    GLES20.glAttachShader(program, vertexShader);
184    GLES20.glAttachShader(program, fragmentShader);
185    GLES20.glLinkProgram(program);
186    int[] linkStatus = new int[] {
187        GLES20.GL_FALSE
188    };
189    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
190    if (linkStatus[0] != GLES20.GL_TRUE) {
191      Log.e(TAG, "Could not link program: " +
192          GLES20.glGetProgramInfoLog(program));
193      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
194    }
195    checkNoGLES2Error();
196    return program;
197}
198
199  /**
200   * Class used to display stream of YUV420 frames at particular location
201   * on a screen. New video frames are sent to display using renderFrame()
202   * call.
203   */
204  private static class YuvImageRenderer implements VideoRenderer.Callbacks {
205    private GLSurfaceView surface;
206    private int id;
207    private int yuvProgram;
208    private int oesProgram;
209    private int[] yuvTextures = { -1, -1, -1 };
210    private int oesTexture = -1;
211    private float[] stMatrix = new float[16];
212
213    // Render frame queue - accessed by two threads. renderFrame() call does
214    // an offer (writing I420Frame to render) and early-returns (recording
215    // a dropped frame) if that queue is full. draw() call does a peek(),
216    // copies frame to texture and then removes it from a queue using poll().
217    LinkedBlockingQueue<I420Frame> frameToRenderQueue;
218    // Local copy of incoming video frame.
219    private I420Frame yuvFrameToRender;
220    private I420Frame textureFrameToRender;
221    // Type of video frame used for recent frame rendering.
222    private static enum RendererType { RENDERER_YUV, RENDERER_TEXTURE };
223    private RendererType rendererType;
224    private ScalingType scalingType;
225    // Flag if renderFrame() was ever called.
226    boolean seenFrame;
227    // Total number of video frames received in renderFrame() call.
228    private int framesReceived;
229    // Number of video frames dropped by renderFrame() because previous
230    // frame has not been rendered yet.
231    private int framesDropped;
232    // Number of rendered video frames.
233    private int framesRendered;
234    // Time in ns when the first video frame was rendered.
235    private long startTimeNs = -1;
236    // Time in ns spent in draw() function.
237    private long drawTimeNs;
238    // Time in ns spent in renderFrame() function - including copying frame
239    // data to rendering planes.
240    private long copyTimeNs;
241    // Texture vertices.
242    private float texLeft;
243    private float texRight;
244    private float texTop;
245    private float texBottom;
246    private FloatBuffer textureVertices;
247    // Texture UV coordinates offsets.
248    private float texOffsetU;
249    private float texOffsetV;
250    private FloatBuffer textureCoords;
251    // Flag if texture vertices or coordinates update is needed.
252    private boolean updateTextureProperties;
253    // Viewport dimensions.
254    private int screenWidth;
255    private int screenHeight;
256    // Video dimension.
257    private int videoWidth;
258    private int videoHeight;
259
260    private YuvImageRenderer(
261        GLSurfaceView surface, int id,
262        int x, int y, int width, int height,
263        ScalingType scalingType) {
264      Log.d(TAG, "YuvImageRenderer.Create id: " + id);
265      this.surface = surface;
266      this.id = id;
267      this.scalingType = scalingType;
268      frameToRenderQueue = new LinkedBlockingQueue<I420Frame>(1);
269      // Create texture vertices.
270      texLeft = (x - 50) / 50.0f;
271      texTop = (50 - y) / 50.0f;
272      texRight = Math.min(1.0f, (x + width - 50) / 50.0f);
273      texBottom = Math.max(-1.0f, (50 - y - height) / 50.0f);
274      float textureVeticesFloat[] = new float[] {
275          texLeft, texTop,
276          texLeft, texBottom,
277          texRight, texTop,
278          texRight, texBottom
279      };
280      textureVertices = directNativeFloatBuffer(textureVeticesFloat);
281      // Create texture UV coordinates.
282      texOffsetU = 0;
283      texOffsetV = 0;
284      float textureCoordinatesFloat[] = new float[] {
285          texOffsetU, texOffsetV,               // left top
286          texOffsetU, 1.0f - texOffsetV,        // left bottom
287          1.0f - texOffsetU, texOffsetV,        // right top
288          1.0f - texOffsetU, 1.0f - texOffsetV  // right bottom
289      };
290      textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
291      updateTextureProperties = false;
292    }
293
294    private void createTextures(int yuvProgram, int oesProgram) {
295      Log.d(TAG, "  YuvImageRenderer.createTextures " + id + " on GL thread:" +
296          Thread.currentThread().getId());
297      this.yuvProgram = yuvProgram;
298      this.oesProgram = oesProgram;
299
300      // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
301      GLES20.glGenTextures(3, yuvTextures, 0);
302      for (int i = 0; i < 3; i++)  {
303        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
304        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
305        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
306            128, 128, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, null);
307        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
308            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
309        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
310            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
311        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
312            GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
313        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
314            GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
315      }
316      checkNoGLES2Error();
317    }
318
319    private void checkAdjustTextureCoords() {
320      if (!updateTextureProperties ||
321          scalingType == ScalingType.SCALE_FILL) {
322        return;
323      }
324      // Re - calculate texture vertices to preserve video aspect ratio.
325      float texRight = this.texRight;
326      float texLeft = this.texLeft;
327      float texTop = this.texTop;
328      float texBottom = this.texBottom;
329      float displayWidth = (texRight - texLeft) * screenWidth / 2;
330      float displayHeight = (texTop - texBottom) * screenHeight / 2;
331      if (displayWidth > 1 && displayHeight > 1 &&
332          videoWidth > 1 && videoHeight > 1) {
333        float displayAspectRatio = displayWidth / displayHeight;
334        float videoAspectRatio = (float)videoWidth / videoHeight;
335        if (scalingType == ScalingType.SCALE_ASPECT_FIT) {
336          // Need to re-adjust vertices width or height to match video AR.
337          if (displayAspectRatio > videoAspectRatio) {
338            float deltaX = (displayWidth - videoAspectRatio * displayHeight) /
339                    instance.screenWidth;
340            texRight -= deltaX;
341            texLeft += deltaX;
342          } else {
343            float deltaY = (displayHeight - displayWidth / videoAspectRatio) /
344                    instance.screenHeight;
345            texTop -= deltaY;
346            texBottom += deltaY;
347          }
348          // Re-allocate vertices buffer to adjust to video aspect ratio.
349          float textureVeticesFloat[] = new float[] {
350              texLeft, texTop,
351              texLeft, texBottom,
352              texRight, texTop,
353              texRight, texBottom
354          };
355          textureVertices = directNativeFloatBuffer(textureVeticesFloat);
356        }
357        if (scalingType == ScalingType.SCALE_ASPECT_FILL) {
358          // Need to re-adjust UV coordinates to match display AR.
359          if (displayAspectRatio > videoAspectRatio) {
360            texOffsetV = (1.0f - videoAspectRatio / displayAspectRatio) / 2.0f;
361          } else {
362            texOffsetU = (1.0f - displayAspectRatio / videoAspectRatio) / 2.0f;
363          }
364          // Re-allocate coordinates buffer to adjust to display aspect ratio.
365          float textureCoordinatesFloat[] = new float[] {
366              texOffsetU, texOffsetV,               // left top
367              texOffsetU, 1.0f - texOffsetV,        // left bottom
368              1.0f - texOffsetU, texOffsetV,        // right top
369              1.0f - texOffsetU, 1.0f - texOffsetV  // right bottom
370          };
371          textureCoords = directNativeFloatBuffer(textureCoordinatesFloat);
372        }
373      }
374      updateTextureProperties = false;
375    }
376
377    private void draw() {
378      if (!seenFrame) {
379        // No frame received yet - nothing to render.
380        return;
381      }
382      // Check if texture vertices/coordinates adjustment is required when
383      // screen orientation changes or video frame size changes.
384      checkAdjustTextureCoords();
385
386      long now = System.nanoTime();
387
388      I420Frame frameFromQueue;
389      synchronized (frameToRenderQueue) {
390        frameFromQueue = frameToRenderQueue.peek();
391        if (frameFromQueue != null && startTimeNs == -1) {
392          startTimeNs = now;
393        }
394
395        if (rendererType == RendererType.RENDERER_YUV) {
396          // YUV textures rendering.
397          GLES20.glUseProgram(yuvProgram);
398
399          for (int i = 0; i < 3; ++i) {
400            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
401            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
402            if (frameFromQueue != null) {
403              int w = (i == 0) ?
404                  frameFromQueue.width : frameFromQueue.width / 2;
405              int h = (i == 0) ?
406                  frameFromQueue.height : frameFromQueue.height / 2;
407              GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
408                  w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
409                  frameFromQueue.yuvPlanes[i]);
410            }
411          }
412        } else {
413          // External texture rendering.
414          GLES20.glUseProgram(oesProgram);
415
416          if (frameFromQueue != null) {
417            oesTexture = frameFromQueue.textureId;
418            if (frameFromQueue.textureObject instanceof SurfaceTexture) {
419              SurfaceTexture surfaceTexture =
420                  (SurfaceTexture) frameFromQueue.textureObject;
421              surfaceTexture.updateTexImage();
422              surfaceTexture.getTransformMatrix(stMatrix);
423            }
424          }
425          GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
426          GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTexture);
427        }
428
429        if (frameFromQueue != null) {
430          frameToRenderQueue.poll();
431        }
432      }
433
434      if (rendererType == RendererType.RENDERER_YUV) {
435        GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "y_tex"), 0);
436        GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "u_tex"), 1);
437        GLES20.glUniform1i(GLES20.glGetUniformLocation(yuvProgram, "v_tex"), 2);
438      }
439
440      int posLocation = GLES20.glGetAttribLocation(yuvProgram, "in_pos");
441      if (posLocation == -1) {
442        throw new RuntimeException("Could not get attrib location for in_pos");
443      }
444      GLES20.glEnableVertexAttribArray(posLocation);
445      GLES20.glVertexAttribPointer(
446          posLocation, 2, GLES20.GL_FLOAT, false, 0, textureVertices);
447
448      int texLocation = GLES20.glGetAttribLocation(yuvProgram, "in_tc");
449      if (texLocation == -1) {
450        throw new RuntimeException("Could not get attrib location for in_tc");
451      }
452      GLES20.glEnableVertexAttribArray(texLocation);
453      GLES20.glVertexAttribPointer(
454          texLocation, 2, GLES20.GL_FLOAT, false, 0, textureCoords);
455
456      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
457
458      GLES20.glDisableVertexAttribArray(posLocation);
459      GLES20.glDisableVertexAttribArray(texLocation);
460
461      checkNoGLES2Error();
462
463      if (frameFromQueue != null) {
464        framesRendered++;
465        drawTimeNs += (System.nanoTime() - now);
466        if ((framesRendered % 150) == 0) {
467          logStatistics();
468        }
469      }
470    }
471
472    private void logStatistics() {
473      long timeSinceFirstFrameNs = System.nanoTime() - startTimeNs;
474      Log.d(TAG, "ID: " + id + ". Type: " + rendererType +
475          ". Frames received: " + framesReceived +
476          ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
477      if (framesReceived > 0 && framesRendered > 0) {
478        Log.d(TAG, "Duration: " + (int)(timeSinceFirstFrameNs / 1e6) +
479            " ms. FPS: " + (float)framesRendered * 1e9 / timeSinceFirstFrameNs);
480        Log.d(TAG, "Draw time: " +
481            (int) (drawTimeNs / (1000 * framesRendered)) + " us. Copy time: " +
482            (int) (copyTimeNs / (1000 * framesReceived)) + " us");
483      }
484    }
485
486    public void setScreenSize(final int screenWidth, final int screenHeight) {
487      this.screenWidth = screenWidth;
488      this.screenHeight = screenHeight;
489      updateTextureProperties = true;
490    }
491
492    @Override
493    public void setSize(final int width, final int height) {
494      Log.d(TAG, "ID: " + id + ". YuvImageRenderer.setSize: " +
495          width + " x " + height);
496      videoWidth = width;
497      videoHeight = height;
498      int[] strides = { width, width / 2, width / 2  };
499      // Frame re-allocation need to be synchronized with copying
500      // frame to textures in draw() function to avoid re-allocating
501      // the frame while it is being copied.
502      synchronized (frameToRenderQueue) {
503        // Clear rendering queue.
504        frameToRenderQueue.poll();
505        // Re-allocate / allocate the frame.
506        yuvFrameToRender = new I420Frame(width, height, strides, null);
507        textureFrameToRender = new I420Frame(width, height, null, -1);
508        updateTextureProperties = true;
509      }
510    }
511
512    @Override
513    public synchronized void renderFrame(I420Frame frame) {
514      long now = System.nanoTime();
515      framesReceived++;
516      // Skip rendering of this frame if setSize() was not called.
517      if (yuvFrameToRender == null || textureFrameToRender == null) {
518        framesDropped++;
519        return;
520      }
521      // Check input frame parameters.
522      if (frame.yuvFrame) {
523        if (!(frame.yuvStrides[0] == frame.width &&
524            frame.yuvStrides[1] == frame.width / 2 &&
525            frame.yuvStrides[2] == frame.width / 2)) {
526          Log.e(TAG, "Incorrect strides " + frame.yuvStrides[0] + ", " +
527              frame.yuvStrides[1] + ", " + frame.yuvStrides[2]);
528          return;
529        }
530        // Check incoming frame dimensions.
531        if (frame.width != yuvFrameToRender.width ||
532            frame.height != yuvFrameToRender.height) {
533          throw new RuntimeException("Wrong frame size " +
534              frame.width + " x " + frame.height);
535        }
536      }
537
538      if (frameToRenderQueue.size() > 0) {
539        // Skip rendering of this frame if previous frame was not rendered yet.
540        framesDropped++;
541        return;
542      }
543
544      // Create a local copy of the frame.
545      if (frame.yuvFrame) {
546        yuvFrameToRender.copyFrom(frame);
547        rendererType = RendererType.RENDERER_YUV;
548        frameToRenderQueue.offer(yuvFrameToRender);
549      } else {
550        textureFrameToRender.copyFrom(frame);
551        rendererType = RendererType.RENDERER_TEXTURE;
552        frameToRenderQueue.offer(textureFrameToRender);
553      }
554      copyTimeNs += (System.nanoTime() - now);
555      seenFrame = true;
556
557      // Request rendering.
558      surface.requestRender();
559    }
560
561  }
562
563  /** Passes GLSurfaceView to video renderer. */
564  public static void setView(GLSurfaceView surface) {
565    Log.d(TAG, "VideoRendererGui.setView");
566    instance = new VideoRendererGui(surface);
567  }
568
569  public static EGLContext getEGLContext() {
570    return eglContext;
571  }
572
573  /**
574   * Creates VideoRenderer with top left corner at (x, y) and resolution
575   * (width, height). All parameters are in percentage of screen resolution.
576   */
577  public static VideoRenderer createGui(
578      int x, int y, int width, int height) throws Exception {
579    YuvImageRenderer javaGuiRenderer = create(x, y, width, height);
580    return new VideoRenderer(javaGuiRenderer);
581  }
582
583  public static VideoRenderer.Callbacks createGuiRenderer(
584      int x, int y, int width, int height) {
585    return create(x, y, width, height);
586  }
587
588  /**
589   * Creates VideoRenderer.Callbacks with top left corner at (x, y) and
590   * resolution (width, height). All parameters are in percentage of
591   * screen resolution.
592   */
593  public static YuvImageRenderer create(
594      int x, int y, int width, int height) {
595    // Check display region parameters.
596    if (x < 0 || x > 100 || y < 0 || y > 100 ||
597        width < 0 || width > 100 || height < 0 || height > 100 ||
598        x + width > 100 || y + height > 100) {
599      throw new RuntimeException("Incorrect window parameters.");
600    }
601
602    if (instance == null) {
603      throw new RuntimeException(
604          "Attempt to create yuv renderer before setting GLSurfaceView");
605    }
606    final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
607        instance.surface, instance.yuvImageRenderers.size(),
608        x, y, width, height, ScalingType.SCALE_ASPECT_FIT);
609    synchronized (instance.yuvImageRenderers) {
610      if (instance.onSurfaceCreatedCalled) {
611        // onSurfaceCreated has already been called for VideoRendererGui -
612        // need to create texture for new image and add image to the
613        // rendering list.
614        final CountDownLatch countDownLatch = new CountDownLatch(1);
615        instance.surface.queueEvent(new Runnable() {
616          public void run() {
617            yuvImageRenderer.createTextures(
618                instance.yuvProgram, instance.oesProgram);
619            yuvImageRenderer.setScreenSize(
620                instance.screenWidth, instance.screenHeight);
621            countDownLatch.countDown();
622          }
623        });
624        // Wait for task completion.
625        try {
626          countDownLatch.await();
627        } catch (InterruptedException e) {
628          throw new RuntimeException(e);
629        }
630      }
631      // Add yuv renderer to rendering list.
632      instance.yuvImageRenderers.add(yuvImageRenderer);
633    }
634    return yuvImageRenderer;
635  }
636
637  @Override
638  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
639    Log.d(TAG, "VideoRendererGui.onSurfaceCreated");
640    // Store render EGL context
641    eglContext = EGL14.eglGetCurrentContext();
642    Log.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
643
644    // Create YUV and OES programs.
645    yuvProgram = createProgram(VERTEX_SHADER_STRING,
646        YUV_FRAGMENT_SHADER_STRING);
647    oesProgram = createProgram(VERTEX_SHADER_STRING,
648        OES_FRAGMENT_SHADER_STRING);
649
650    synchronized (yuvImageRenderers) {
651      // Create textures for all images.
652      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
653        yuvImageRenderer.createTextures(yuvProgram, oesProgram);
654      }
655      onSurfaceCreatedCalled = true;
656    }
657    checkNoGLES2Error();
658    GLES20.glClearColor(0.0f, 0.0f, 0.1f, 1.0f);
659  }
660
661  @Override
662  public void onSurfaceChanged(GL10 unused, int width, int height) {
663    Log.d(TAG, "VideoRendererGui.onSurfaceChanged: " +
664        width + " x " + height + "  ");
665    screenWidth = width;
666    screenHeight = height;
667    GLES20.glViewport(0, 0, width, height);
668    synchronized (yuvImageRenderers) {
669      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
670        yuvImageRenderer.setScreenSize(screenWidth, screenHeight);
671      }
672    }
673  }
674
675  @Override
676  public void onDrawFrame(GL10 unused) {
677    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
678    synchronized (yuvImageRenderers) {
679      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
680        yuvImageRenderer.draw();
681      }
682    }
683  }
684
685}
686