SurfaceTextureRenderer.java revision 0fd198ad89ec9c600bb1761b10d938146c28bb98
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.hardware.camera2.legacy;

import android.graphics.ImageFormat;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.os.Environment;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.text.format.Time;
import android.util.Log;
import android.util.Pair;
import android.util.Size;
import android.view.Surface;
import android.os.SystemProperties;

import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
 * A renderer class that manages the GL state, and can draw a frame into a set of output
 * {@link Surface}s.
 */
public class SurfaceTextureRenderer {
    private static final String TAG = SurfaceTextureRenderer.class.getSimpleName();
    private static final boolean DEBUG = Log.isLoggable(LegacyCameraDevice.DEBUG_PROP, Log.DEBUG);
    private static final int EGL_RECORDABLE_ANDROID = 0x3142; // from EGL/eglext.h
    private static final int GL_MATRIX_SIZE = 16;
    private static final int VERTEX_POS_SIZE = 3;
    private static final int VERTEX_UV_SIZE = 2;
    private static final int EGL_COLOR_BITLENGTH = 8;
    private static final int GLES_VERSION = 2;
    private static final int PBUFFER_PIXEL_BYTES = 4;

    private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
    private EGLConfig mConfigs;

    // Pairs an output Surface with the EGL surface rendered into it, plus its dimensions.
    private class EGLSurfaceHolder {
        Surface surface;
        EGLSurface eglSurface;
        int width;
        int height;
    }

    // Surfaces drawn via EGL window surfaces (no pixel-format conversion needed).
    private List<EGLSurfaceHolder> mSurfaces = new ArrayList<EGLSurfaceHolder>();
    // Surfaces that need a CPU-side format conversion; rendered into a shared pbuffer.
    private List<EGLSurfaceHolder> mConversionSurfaces = new ArrayList<EGLSurfaceHolder>();

    // Scratch buffer for glReadPixels, sized to the largest conversion surface.
    private ByteBuffer mPBufferPixels;

    // Hold this to avoid GC
    private volatile SurfaceTexture mSurfaceTexture;

    private static final int FLOAT_SIZE_BYTES = 4;
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;

    // Sampling is mirrored across the vertical axis to undo horizontal flip from the front camera
    private static final float[] sFrontCameraTriangleVertices = {
            // X,    Y, Z,   U,   V
            -1.0f, -1.0f, 0, 1.f, 0.f,
             1.0f, -1.0f, 0, 0.f, 0.f,
            -1.0f,  1.0f, 0, 1.f, 1.f,
             1.0f,  1.0f, 0, 0.f, 1.f,
    };

    // Sampling is 1:1 for a straight copy for the back camera
    private static final float[] sBackCameraTriangleVertices = {
            // X,    Y, Z,   U,   V
            -1.0f, -1.0f, 0, 0.f, 0.f,
             1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f,  1.0f, 0, 0.f, 1.f,
             1.0f,  1.0f, 0, 1.f, 1.f,
    };

    private FloatBuffer mTriangleVertices;

    /**
     * As used in this file, this vertex shader maps a unit square to the view, and
     * tells the fragment shader to interpolate over it.  Each surface pixel position
     * is mapped to a 2D homogeneous texture coordinate of the form (s, t, 0, 1) with
     * s and t in the inclusive range [0, 1], and the matrix from
     * {@link SurfaceTexture#getTransformMatrix(float[])} is used to map this
     * coordinate to a texture location.
     */
    private static final String VERTEX_SHADER =
            "uniform mat4 uMVPMatrix;\n" +
            "uniform mat4 uSTMatrix;\n" +
            "attribute vec4 aPosition;\n" +
            "attribute vec4 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main() {\n" +
            "  gl_Position = uMVPMatrix * aPosition;\n" +
            "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
            "}\n";

    /**
     * This fragment shader simply draws the color in the 2D texture at
     * the location from the {@code VERTEX_SHADER}.
     */
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
            "}\n";

    private float[] mMVPMatrix = new float[GL_MATRIX_SIZE];
    private float[] mSTMatrix = new float[GL_MATRIX_SIZE];

    private int mProgram;
    private int mTextureID = 0;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;

    private PerfMeasurement mPerfMeasurer = null;
    private static final String LEGACY_PERF_PROPERTY = "persist.camera.legacy_perf";

    /**
     * Create a renderer whose texture sampling undoes the front camera's horizontal flip,
     * or copies 1:1 for the back camera.
     *
     * @param facing one of the {@link CameraCharacteristics} {@code LENS_FACING_*} values.
     */
    public SurfaceTextureRenderer(int facing) {
        if (facing == CameraCharacteristics.LENS_FACING_BACK) {
            mTriangleVertices = ByteBuffer.allocateDirect(sBackCameraTriangleVertices.length *
                    FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
            mTriangleVertices.put(sBackCameraTriangleVertices).position(0);
        } else {
            mTriangleVertices = ByteBuffer.allocateDirect(sFrontCameraTriangleVertices.length *
                    FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
            mTriangleVertices.put(sFrontCameraTriangleVertices).position(0);
        }
        Matrix.setIdentityM(mSTMatrix, 0);
    }

    /**
     * Compile a shader of the given type from source.
     *
     * @throws IllegalStateException if the shader fails to compile.
     */
    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            Log.e(TAG, "Could not compile shader " + shaderType + ":");
            Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            // TODO: handle this more gracefully
            throw new IllegalStateException("Could not compile shader " + shaderType);
        }
        return shader;
    }

    /**
     * Compile and link a GL program from the given vertex/fragment shader sources.
     *
     * @return the program handle, or 0 if the program could not be created.
     * @throws IllegalStateException if linking fails.
     */
    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            // Bail out instead of attaching shaders to an invalid (zero) program handle.
            Log.e(TAG, "Could not create program");
            return 0;
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.e(TAG, "Could not link program: ");
            Log.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            // TODO: handle this more gracefully
            throw new IllegalStateException("Could not link program");
        }
        // Once linked, the program keeps the compiled binaries; release the shader
        // objects so they don't leak for the lifetime of the context.
        GLES20.glDetachShader(program, vertexShader);
        GLES20.glDetachShader(program, pixelShader);
        GLES20.glDeleteShader(vertexShader);
        GLES20.glDeleteShader(pixelShader);
        return program;
    }

    /**
     * Draw the latest frame from the given {@link SurfaceTexture} into the currently bound
     * EGL surface, letterboxing/pillarboxing the output dimensions into the intermediate
     * texture dimensions.
     *
     * @param st the texture to sample from.
     * @param width output surface width in pixels.
     * @param height output surface height in pixels.
     */
    private void drawFrame(SurfaceTexture st, int width, int height) {
        checkGlError("onDrawFrame start");
        st.getTransformMatrix(mSTMatrix);

        Matrix.setIdentityM(mMVPMatrix, /*smOffset*/0);

        // Find intermediate buffer dimensions
        Size dimens;
        try {
            dimens = LegacyCameraDevice.getTextureSize(st);
        } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
            // Should never hit this.
            throw new IllegalStateException("Surface abandoned, skipping drawFrame...", e);
        }
        float texWidth = dimens.getWidth();
        float texHeight = dimens.getHeight();

        if (texWidth <= 0 || texHeight <= 0) {
            throw new IllegalStateException("Illegal intermediate texture with dimension of 0");
        }

        // Letterbox or pillarbox output dimensions into intermediate dimensions.
        RectF intermediate = new RectF(/*left*/0, /*top*/0, /*right*/texWidth, /*bottom*/texHeight);
        RectF output = new RectF(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
        android.graphics.Matrix boxingXform = new android.graphics.Matrix();
        boxingXform.setRectToRect(output, intermediate, android.graphics.Matrix.ScaleToFit.CENTER);
        boxingXform.mapRect(output);

        // Find scaling factor from pillarboxed/letterboxed output dimensions to intermediate
        // buffer dimensions.
        float scaleX = intermediate.width() / output.width();
        float scaleY = intermediate.height() / output.height();

        // Scale opposite dimension in clip coordinates so output is letterboxed/pillarboxed into
        // the intermediate dimensions (rather than vice-versa).
        Matrix.scaleM(mMVPMatrix, /*offset*/0, /*x*/scaleY, /*y*/scaleX, /*z*/1);

        if (DEBUG) {
            Log.d(TAG, "Scaling factors (S_x = " + scaleX + ",S_y = " + scaleY + ") used for " +
                    width + "x" + height + " surface, intermediate buffer size is " + texWidth +
                    "x" + texHeight);
        }

        // Set viewport to be output buffer dimensions
        GLES20.glViewport(0, 0, width, height);

        if (DEBUG) {
            GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
        }

        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, VERTEX_POS_SIZE, GLES20.GL_FLOAT,
                /*normalized*/ false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glVertexAttribPointer(maTextureHandle, VERTEX_UV_SIZE, GLES20.GL_FLOAT,
                /*normalized*/ false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");

        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, /*count*/ 1, /*transpose*/ false, mMVPMatrix,
                /*offset*/ 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, /*count*/ 1, /*transpose*/ false, mSTMatrix,
                /*offset*/ 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /*offset*/ 0, /*count*/ 4);
        checkGlError("glDrawArrays");
    }

    /**
     * Initializes GL state.  Call this after the EGL surface has been created and made current.
     */
    private void initializeGLState() {
        mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
        if (mProgram == 0) {
            throw new IllegalStateException("failed creating program");
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new IllegalStateException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new IllegalStateException("Could not get attrib location for aTextureCoord");
        }

        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        checkGlError("glGetUniformLocation uMVPMatrix");
        if (muMVPMatrixHandle == -1) {
            throw new IllegalStateException("Could not get attrib location for uMVPMatrix");
        }

        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
        checkGlError("glGetUniformLocation uSTMatrix");
        if (muSTMatrixHandle == -1) {
            throw new IllegalStateException("Could not get attrib location for uSTMatrix");
        }

        int[] textures = new int[1];
        GLES20.glGenTextures(/*n*/ 1, textures, /*offset*/ 0);

        mTextureID = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
        checkGlError("glBindTexture mTextureID");

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        checkGlError("glTexParameter");
    }

    private int getTextureId() {
        return mTextureID;
    }

    /**
     * Drop all surface bookkeeping and release the {@link SurfaceTexture} (if any).
     */
    private void clearState() {
        mSurfaces.clear();
        mConversionSurfaces.clear();
        mPBufferPixels = null;
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
        }
        mSurfaceTexture = null;
    }

    /**
     * Initialize the EGL display, choose a recordable RGB888 ES2 config, and create the
     * EGL context used for all rendering.
     *
     * @throws IllegalStateException if any step of EGL setup fails.
     */
    private void configureEGLContext() {
        mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new IllegalStateException("No EGL14 display");
        }
        int[] version = new int[2];
        if (!EGL14.eglInitialize(mEGLDisplay, version, /*offset*/ 0, version, /*offset*/ 1)) {
            throw new IllegalStateException("Cannot initialize EGL14");
        }

        int[] attribList = {
                EGL14.EGL_RED_SIZE, EGL_COLOR_BITLENGTH,
                EGL14.EGL_GREEN_SIZE, EGL_COLOR_BITLENGTH,
                EGL14.EGL_BLUE_SIZE, EGL_COLOR_BITLENGTH,
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT | EGL14.EGL_WINDOW_BIT,
                EGL14.EGL_NONE
        };
        EGLConfig[] configs = new EGLConfig[1];
        int[] numConfigs = new int[1];
        boolean chose = EGL14.eglChooseConfig(mEGLDisplay, attribList, /*offset*/ 0, configs,
                /*offset*/ 0, configs.length, numConfigs, /*offset*/ 0);
        checkEglError("eglCreateContext RGB888+recordable ES2");
        // Guard against a config query that "succeeds" with zero matching configs, which
        // would otherwise leave mConfigs null and fail confusingly later.
        if (!chose || numConfigs[0] <= 0 || configs[0] == null) {
            throw new IllegalStateException("No suitable EGL config found");
        }
        mConfigs = configs[0];
        int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, GLES_VERSION,
                EGL14.EGL_NONE
        };
        mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
                attrib_list, /*offset*/ 0);
        checkEglError("eglCreateContext");
        if(mEGLContext == EGL14.EGL_NO_CONTEXT) {
            throw new IllegalStateException("No EGLContext could be made");
        }
    }

    /**
     * Create an EGL window surface for each holder's {@link Surface}, recording its size.
     * Abandoned surfaces are logged and skipped.
     */
    private void configureEGLOutputSurfaces(Collection<EGLSurfaceHolder> surfaces) {
        if (surfaces == null || surfaces.size() == 0) {
            throw new IllegalStateException("No Surfaces were provided to draw to");
        }
        int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        for (EGLSurfaceHolder holder : surfaces) {
            try {
                Size size = LegacyCameraDevice.getSurfaceSize(holder.surface);
                holder.width = size.getWidth();
                holder.height = size.getHeight();
                holder.eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mConfigs,
                        holder.surface, surfaceAttribs, /*offset*/ 0);
                checkEglError("eglCreateWindowSurface");
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
    }

    /**
     * Create a pbuffer EGL surface per holder and allocate {@link #mPBufferPixels} large
     * enough to read back the biggest one.  Abandoned surfaces are logged and skipped.
     */
    private void configureEGLPbufferSurfaces(Collection<EGLSurfaceHolder> surfaces) {
        if (surfaces == null || surfaces.size() == 0) {
            throw new IllegalStateException("No Surfaces were provided to draw to");
        }

        int maxLength = 0;
        for (EGLSurfaceHolder holder : surfaces) {
            try {
                Size size = LegacyCameraDevice.getSurfaceSize(holder.surface);
                int length = size.getWidth() * size.getHeight();
                // Find max surface size, ensure PBuffer can hold this many pixels
                maxLength = (length > maxLength) ? length : maxLength;
                int[] surfaceAttribs = {
                        EGL14.EGL_WIDTH, size.getWidth(),
                        EGL14.EGL_HEIGHT, size.getHeight(),
                        EGL14.EGL_NONE
                };
                holder.width = size.getWidth();
                holder.height = size.getHeight();
                holder.eglSurface =
                        EGL14.eglCreatePbufferSurface(mEGLDisplay, mConfigs, surfaceAttribs, 0);
                checkEglError("eglCreatePbufferSurface");
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping...", e);
            }
        }
        mPBufferPixels = ByteBuffer.allocateDirect(maxLength * PBUFFER_PIXEL_BYTES)
                .order(ByteOrder.nativeOrder());
    }

    /**
     * Tear down all EGL surfaces, the context, and the display connection, then clear
     * all surface bookkeeping.  Safe to call when nothing is configured.
     */
    private void releaseEGLContext() {
        if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
            EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
                    EGL14.EGL_NO_CONTEXT);
            dumpGlTiming();
            if (mSurfaces != null) {
                for (EGLSurfaceHolder holder : mSurfaces) {
                    if (holder.eglSurface != null) {
                        EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
                    }
                }
            }
            if (mConversionSurfaces != null) {
                for (EGLSurfaceHolder holder : mConversionSurfaces) {
                    if (holder.eglSurface != null) {
                        EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
                    }
                }
            }
            EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
            EGL14.eglReleaseThread();
            EGL14.eglTerminate(mEGLDisplay);
        }

        mConfigs = null;
        mEGLDisplay = EGL14.EGL_NO_DISPLAY;
        mEGLContext = EGL14.EGL_NO_CONTEXT;
        clearState();
    }

    private void makeCurrent(EGLSurface surface) {
        EGL14.eglMakeCurrent(mEGLDisplay, surface, surface, mEGLContext);
        checkEglError("makeCurrent");
    }

    private boolean swapBuffers(EGLSurface surface) {
        boolean result = EGL14.eglSwapBuffers(mEGLDisplay, surface);
        checkEglError("swapBuffers");
        return result;
    }

    /**
     * Throw if the last EGL call failed, with {@code msg} for context.
     */
    private void checkEglError(String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new IllegalStateException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }

    /**
     * Throw if any GL error is pending, with {@code msg} for context.
     */
    private void checkGlError(String msg) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            throw new IllegalStateException(msg + ": GLES20 error: 0x" + Integer.toHexString(error));
        }
    }

    /**
     * Save a measurement dump to disk, in
     * {@code /sdcard/CameraLegacy/durations_<time>_<width1>x<height1>_...txt}
     */
    private void dumpGlTiming() {
        if (mPerfMeasurer == null) return;

        File legacyStorageDir = new File(Environment.getExternalStorageDirectory(), "CameraLegacy");
        if (!legacyStorageDir.exists()){
            if (!legacyStorageDir.mkdirs()){
                Log.e(TAG, "Failed to create directory for data dump");
                return;
            }
        }

        StringBuilder path = new StringBuilder(legacyStorageDir.getPath());
        path.append(File.separator);
        path.append("durations_");

        Time now = new Time();
        now.setToNow();
        path.append(now.format2445());
        path.append("_S");
        for (EGLSurfaceHolder surface : mSurfaces) {
            path.append(String.format("_%d_%d", surface.width, surface.height));
        }
        path.append("_C");
        for (EGLSurfaceHolder surface : mConversionSurfaces) {
            path.append(String.format("_%d_%d", surface.width, surface.height));
        }
        path.append(".txt");
        mPerfMeasurer.dumpPerformanceData(path.toString());
    }

    private void setupGlTiming() {
        if (PerfMeasurement.isGlTimingSupported()) {
            Log.d(TAG, "Enabling GL performance measurement");
            mPerfMeasurer = new PerfMeasurement();
        } else {
            Log.d(TAG, "GL performance measurement not supported on this device");
            mPerfMeasurer = null;
        }
    }

    private void beginGlTiming() {
        if (mPerfMeasurer == null) return;
        mPerfMeasurer.startTimer();
    }

    private void addGlTimestamp(long timestamp) {
        if (mPerfMeasurer == null) return;
        mPerfMeasurer.addTimestamp(timestamp);
    }

    private void endGlTiming() {
        if (mPerfMeasurer == null) return;
        mPerfMeasurer.stopTimer();
    }

    /**
     * Return the surface texture to draw to - this is the texture use to when producing output
     * surface buffers.
     *
     * @return a {@link SurfaceTexture}.
     */
    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }

    /**
     * Set a collection of output {@link Surface}s that can be drawn to.
     *
     * @param surfaces a {@link Collection} of surfaces.
     */
    public void configureSurfaces(Collection<Surface> surfaces) {
        releaseEGLContext();

        if (surfaces == null || surfaces.size() == 0) {
            Log.w(TAG, "No output surfaces configured for GL drawing.");
            return;
        }

        for (Surface s : surfaces) {
            // If pixel conversions aren't handled by egl, use a pbuffer
            try {
                if (LegacyCameraDevice.needsConversion(s)) {
                    // Always override to YV12 output for YUV surface formats.
                    LegacyCameraDevice.setSurfaceFormat(s, ImageFormat.YV12);
                    EGLSurfaceHolder holder = new EGLSurfaceHolder();
                    holder.surface = s;
                    mConversionSurfaces.add(holder);
                } else {
                    EGLSurfaceHolder holder = new EGLSurfaceHolder();
                    holder.surface = s;
                    mSurfaces.add(holder);
                }
            } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                Log.w(TAG, "Surface abandoned, skipping configuration... ", e);
            }
        }

        // Set up egl display
        configureEGLContext();

        // Set up regular egl surfaces if needed
        if (mSurfaces.size() > 0) {
            configureEGLOutputSurfaces(mSurfaces);
        }

        // Set up pbuffer surface if needed
        if (mConversionSurfaces.size() > 0) {
            configureEGLPbufferSurfaces(mConversionSurfaces);
        }
        makeCurrent((mSurfaces.size() > 0) ? mSurfaces.get(0).eglSurface :
                mConversionSurfaces.get(0).eglSurface);
        initializeGLState();
        mSurfaceTexture = new SurfaceTexture(getTextureId());

        // Set up performance tracking if enabled
        if (SystemProperties.getBoolean(LEGACY_PERF_PROPERTY, false)) {
            setupGlTiming();
        }
    }

    /**
     * Draw the current buffer in the {@link SurfaceTexture} returned from
     * {@link #getSurfaceTexture()} into the set of target {@link Surface}s
     * in the next request from the given {@link CaptureCollector}, or drop
     * the frame if none is available.
     *
     * <p>
     * Any {@link Surface}s targeted must be a subset of the {@link Surface}s
     * set in the last {@link #configureSurfaces(java.util.Collection)} call.
     * </p>
     *
     * @param targetCollector the surfaces to draw to.
     */
    public void drawIntoSurfaces(CaptureCollector targetCollector) {
        if ((mSurfaces == null || mSurfaces.size() == 0)
                && (mConversionSurfaces == null || mConversionSurfaces.size() == 0)) {
            return;
        }

        boolean doTiming = targetCollector.hasPendingPreviewCaptures();
        checkGlError("before updateTexImage");

        if (doTiming) {
            beginGlTiming();
        }

        mSurfaceTexture.updateTexImage();

        long timestamp = mSurfaceTexture.getTimestamp();

        Pair<RequestHolder, Long> captureHolder = targetCollector.previewCaptured(timestamp);

        // No preview request queued, drop frame.
        if (captureHolder == null) {
            if (DEBUG) {
                Log.d(TAG, "Dropping preview frame.");
            }
            if (doTiming) {
                endGlTiming();
            }
            return;
        }

        RequestHolder request = captureHolder.first;

        Collection<Surface> targetSurfaces = request.getHolderTargets();
        if (doTiming) {
            addGlTimestamp(timestamp);
        }

        List<Long> targetSurfaceIds = LegacyCameraDevice.getSurfaceIds(targetSurfaces);
        for (EGLSurfaceHolder holder : mSurfaces) {
            if (LegacyCameraDevice.containsSurfaceId(holder.surface, targetSurfaceIds)) {
                makeCurrent(holder.eglSurface);
                try {
                    LegacyCameraDevice.setSurfaceDimens(holder.surface, holder.width,
                            holder.height);
                    LegacyCameraDevice.setNextTimestamp(holder.surface, captureHolder.second);
                    drawFrame(mSurfaceTexture, holder.width, holder.height);
                    swapBuffers(holder.eglSurface);
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, dropping frame. ", e);
                }
            }
        }
        for (EGLSurfaceHolder holder : mConversionSurfaces) {
            if (LegacyCameraDevice.containsSurfaceId(holder.surface, targetSurfaceIds)) {
                makeCurrent(holder.eglSurface);
                drawFrame(mSurfaceTexture, holder.width, holder.height);
                mPBufferPixels.clear();
                GLES20.glReadPixels(/*x*/ 0, /*y*/ 0, holder.width, holder.height,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPBufferPixels);
                checkGlError("glReadPixels");

                try {
                    int format = LegacyCameraDevice.detectSurfaceType(holder.surface);
                    LegacyCameraDevice.setNextTimestamp(holder.surface, captureHolder.second);
                    LegacyCameraDevice.produceFrame(holder.surface, mPBufferPixels.array(),
                            holder.width, holder.height, format);
                    swapBuffers(holder.eglSurface);
                } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
                    Log.w(TAG, "Surface abandoned, dropping frame. ", e);
                }
            }
        }
        targetCollector.previewProduced();

        if (doTiming) {
            endGlTiming();
        }
    }

    /**
     * Clean up the current GL context.
     */
    public void cleanupEGLContext() {
        releaseEGLContext();
    }

    /**
     * Drop all current GL operations on the floor.
     */
    public void flush() {
        // TODO: implement flush
        Log.e(TAG, "Flush not yet implemented.");
    }
}