1/* 2 * Copyright 2018 Google Inc. All rights reserved. 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package androidx.heifwriter; 18 19import android.graphics.Bitmap; 20import android.graphics.Canvas; 21import android.graphics.Rect; 22import android.graphics.SurfaceTexture; 23import android.media.Image; 24import android.media.MediaCodec; 25import android.media.MediaCodec.BufferInfo; 26import android.media.MediaCodec.CodecException; 27import android.media.MediaCodecInfo; 28import android.media.MediaCodecInfo.CodecCapabilities; 29import android.media.MediaFormat; 30import android.opengl.GLES20; 31import android.os.Handler; 32import android.os.HandlerThread; 33import android.os.Looper; 34import android.os.Process; 35import android.util.Log; 36import android.util.Range; 37import android.view.Surface; 38 39import androidx.annotation.IntDef; 40import androidx.annotation.NonNull; 41import androidx.annotation.Nullable; 42 43import java.io.IOException; 44import java.lang.annotation.Retention; 45import java.lang.annotation.RetentionPolicy; 46import java.nio.ByteBuffer; 47import java.util.ArrayList; 48 49/** 50 * This class encodes images into HEIF-compatible samples using HEVC encoder. 51 * 52 * It currently supports three input modes: {@link #INPUT_MODE_BUFFER}, 53 * {@link #INPUT_MODE_SURFACE}, or {@link #INPUT_MODE_BITMAP}. 
 *
 * The output format and samples are sent back in {@link
 * Callback#onOutputFormatChanged(HeifEncoder, MediaFormat)} and {@link
 * Callback#onDrainOutputBuffer(HeifEncoder, ByteBuffer)}. If the client
 * requests to use grid, each tile will be sent back individually.
 *
 * HeifEncoder is made a separate class from {@link HeifWriter}, as some more
 * advanced use cases might want to build solutions on top of the HeifEncoder directly.
 * (eg. mux still images and video tracks into a single container).
 *
 * @hide
 */
public final class HeifEncoder implements AutoCloseable,
        SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "HeifEncoder";
    private static final boolean DEBUG = false;

    // Tile size used when grid encoding is enabled.
    private static final int GRID_WIDTH = 512;
    private static final int GRID_HEIGHT = 512;
    // Upper bound on compressed size relative to raw YUV; used to derive bitrate
    // when the encoder doesn't support constant-quality mode.
    private static final double MAX_COMPRESS_RATIO = 0.25f;
    // Number of direct ByteBuffers pre-allocated for buffer input mode.
    private static final int INPUT_BUFFER_POOL_SIZE = 2;

    // After start(), only accessed on mHandler's looper; null once released.
    private MediaCodec mEncoder;

    private final Callback mCallback;
    // Non-null only when the client did not supply a Handler (we own the thread).
    private final HandlerThread mHandlerThread;
    private final Handler mHandler;
    private final @InputMode int mInputMode;

    // Full image dimensions as requested by the client.
    private final int mWidth;
    private final int mHeight;
    // Effective tile geometry: equals the full image (1x1) when grid is disabled
    // or the HEIC encoder handles tiling internally.
    private final int mGridWidth;
    private final int mGridHeight;
    private final int mGridRows;
    private final int mGridCols;
    private final int mNumTiles;

    // Running index of tiles submitted to the encoder; drives presentation times.
    private int mInputIndex;
    // Set once input EOS is queued (buffer mode) or teardown begins; guarded by mEmptyBuffers.
    private boolean mInputEOS;
    private final Rect mSrcRect;
    private final Rect mDstRect;
    // Input buffer currently being sliced into tiles (buffer mode only).
    private ByteBuffer mCurrentBuffer;
    // Simple producer/consumer pool: client fills from mEmptyBuffers, codec thread
    // drains mFilledBuffers; both lists are their own locks.
    private final ArrayList<ByteBuffer> mEmptyBuffers = new ArrayList<>();
    private final ArrayList<ByteBuffer> mFilledBuffers = new ArrayList<>();
    // Indices of codec input buffers reported available but not yet queued.
    private final ArrayList<Integer> mCodecInputBuffers = new ArrayList<>();

    // Helper for tracking EOS when surface is used
    private SurfaceEOSTracker mEOSTracker;

    // Below variables are to handle GL copy from client's surface
    // to encoder surface when tiles are used.
    private SurfaceTexture mInputTexture;
    private Surface mInputSurface;
    private Surface mEncoderSurface;
    private EglWindowSurface mEncoderEglSurface;
    private EglRectBlt mRectBlt;
    private int mTextureId;
    private final float[] mTmpMatrix = new float[16];

    public static final int INPUT_MODE_BUFFER = HeifWriter.INPUT_MODE_BUFFER;
    public static final int INPUT_MODE_SURFACE = HeifWriter.INPUT_MODE_SURFACE;
    public static final int INPUT_MODE_BITMAP = HeifWriter.INPUT_MODE_BITMAP;
    @IntDef({
        INPUT_MODE_BUFFER,
        INPUT_MODE_SURFACE,
        INPUT_MODE_BITMAP,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface InputMode {}

    /**
     * Callbacks for the encoding session. All callbacks are delivered on the
     * handler's looper supplied at construction (or an internal one).
     */
    public static abstract class Callback {
        /**
         * Called when the output format has changed.
         *
         * @param encoder The HeifEncoder object.
         * @param format The new output format.
         */
        public abstract void onOutputFormatChanged(
                @NonNull HeifEncoder encoder, @NonNull MediaFormat format);

        /**
         * Called when an output buffer becomes available.
         *
         * @param encoder The HeifEncoder object.
         * @param byteBuffer the available output buffer.
         */
        public abstract void onDrainOutputBuffer(
                @NonNull HeifEncoder encoder, @NonNull ByteBuffer byteBuffer);

        /**
         * Called when encoding reached the end of stream without error.
         *
         * @param encoder The HeifEncoder object.
         */
        public abstract void onComplete(@NonNull HeifEncoder encoder);

        /**
         * Called when encoding hits an error.
         *
         * @param encoder The HeifEncoder object.
         * @param e The exception that the codec reported.
         */
        public abstract void onError(@NonNull HeifEncoder encoder, @NonNull CodecException e);
    }

    /**
     * Configure the heif encoding session. Should only be called once.
     *
     * @param width Width of the image.
     * @param height Height of the image.
     * @param useGrid Whether to encode image into tiles.
     *                If enabled, tile size will be automatically chosen.
     * @param quality A number between 0 and 100 (inclusive), with 100 indicating the best quality
     *                supported by this implementation (which often results in larger file size).
     * @param inputMode The input type of this encoding session.
     * @param handler If not null, client will receive all callbacks on the handler's looper.
     *                Otherwise, client will receive callbacks on a looper created by us.
     * @param cb The callback to receive various messages from the heif encoder.
     * @throws IOException if the underlying encoder cannot be created.
     * @throws IllegalArgumentException if width/height is negative or quality is out of [0, 100].
     */
    public HeifEncoder(int width, int height, boolean useGrid,
                       int quality, @InputMode int inputMode,
                       @Nullable Handler handler, @NonNull Callback cb) throws IOException {
        if (DEBUG) Log.d(TAG, "width: " + width + ", height: " + height +
                ", useGrid: " + useGrid + ", quality: " + quality + ", inputMode: " + inputMode);

        if (width < 0 || height < 0 || quality < 0 || quality > 100) {
            throw new IllegalArgumentException("invalid encoder inputs");
        }

        // Disable grid if the image is too small
        useGrid &= (width > GRID_WIDTH || height > GRID_HEIGHT);

        // Prefer a true HEIC encoder (handles tiling internally); fall back to a
        // plain HEVC encoder and do the tiling ourselves if HEIC is unavailable
        // or can't handle this size. The thrown bare Exception below is only used
        // as control flow to reach the fallback path.
        boolean useHeicEncoder = false;
        MediaCodecInfo.CodecCapabilities caps = null;
        try {
            mEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC);
            caps = mEncoder.getCodecInfo().getCapabilitiesForType(
                    MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC);
            // If the HEIC encoder can't support the size, fall back to HEVC encoder.
            if (!caps.getVideoCapabilities().isSizeSupported(width, height)) {
                mEncoder.release();
                mEncoder = null;
                throw new Exception();
            }
            useHeicEncoder = true;
        } catch (Exception e) {
            mEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_HEVC);
            caps = mEncoder.getCodecInfo().getCapabilitiesForType(MediaFormat.MIMETYPE_VIDEO_HEVC);
            // Always enable grid if the size is too large for the HEVC encoder
            useGrid |= !caps.getVideoCapabilities().isSizeSupported(width, height);
        }

        mInputMode = inputMode;

        mCallback = cb;

        // Use the client's looper when provided; otherwise spin up our own thread.
        Looper looper = (handler != null) ? handler.getLooper() : null;
        if (looper == null) {
            mHandlerThread = new HandlerThread("HeifEncoderThread",
                    Process.THREAD_PRIORITY_FOREGROUND);
            mHandlerThread.start();
            looper = mHandlerThread.getLooper();
        } else {
            mHandlerThread = null;
        }
        mHandler = new Handler(looper);
        // Both surface and bitmap modes feed the codec through an input surface;
        // only buffer mode uses YUV input buffers directly.
        boolean useSurfaceInternally =
                (inputMode == INPUT_MODE_SURFACE) || (inputMode == INPUT_MODE_BITMAP);
        int colorFormat = useSurfaceInternally ? CodecCapabilities.COLOR_FormatSurface :
                CodecCapabilities.COLOR_FormatYUV420Flexible;

        mWidth = width;
        mHeight = height;

        int gridWidth, gridHeight, gridRows, gridCols;

        if (useGrid) {
            gridWidth = GRID_WIDTH;
            gridHeight = GRID_HEIGHT;
            // Ceiling division: partial edge tiles still count.
            gridRows = (height + GRID_HEIGHT - 1) / GRID_HEIGHT;
            gridCols = (width + GRID_WIDTH - 1) / GRID_WIDTH;
        } else {
            gridWidth = mWidth;
            gridHeight = mHeight;
            gridRows = 1;
            gridCols = 1;
        }

        // HEIC encoder is configured with the full image size (it tiles internally);
        // HEVC encoder is configured per-tile.
        MediaFormat codecFormat;
        if (useHeicEncoder) {
            codecFormat = MediaFormat.createVideoFormat(
                    MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC, mWidth, mHeight);
        } else {
            codecFormat = MediaFormat.createVideoFormat(
                    MediaFormat.MIMETYPE_VIDEO_HEVC, gridWidth, gridHeight);
        }

        if (useGrid) {
            codecFormat.setInteger(MediaFormat.KEY_TILE_WIDTH, gridWidth);
            codecFormat.setInteger(MediaFormat.KEY_TILE_HEIGHT, gridHeight);
            codecFormat.setInteger(MediaFormat.KEY_GRID_COLUMNS, gridCols);
            codecFormat.setInteger(MediaFormat.KEY_GRID_ROWS, gridRows);
        }

        // With the HEIC encoder the app-facing geometry is always one "tile"
        // covering the whole image, regardless of the grid keys set above.
        if (useHeicEncoder) {
            mGridWidth = width;
            mGridHeight = height;
            mGridRows = 1;
            mGridCols = 1;
        } else {
            mGridWidth = gridWidth;
            mGridHeight = gridHeight;
            mGridRows = gridRows;
            mGridCols = gridCols;
        }
        mNumTiles = mGridRows * mGridCols;

        // All-intra: every tile is a sync frame. Frame rate equals tiles-per-image
        // so the bitrate below corresponds to one whole image per second.
        codecFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 0);
        codecFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        codecFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mNumTiles);
        codecFormat.setInteger(MediaFormat.KEY_CAPTURE_RATE, mNumTiles * 30);

        MediaCodecInfo.EncoderCapabilities encoderCaps = caps.getEncoderCapabilities();

        if (encoderCaps.isBitrateModeSupported(
                MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ)) {
            Log.d(TAG, "Setting bitrate mode to constant quality");
            Range<Integer> qualityRange = encoderCaps.getQualityRange();
            Log.d(TAG, "Quality range: " + qualityRange);
            codecFormat.setInteger(MediaFormat.KEY_BITRATE_MODE,
                    MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CQ);
            // Linearly map client quality [0, 100] onto the encoder's quality range.
            codecFormat.setInteger(MediaFormat.KEY_QUALITY, (int) (qualityRange.getLower() +
                    (qualityRange.getUpper() - qualityRange.getLower()) * quality / 100.0));
        } else {
            if (encoderCaps.isBitrateModeSupported(
                    MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR)) {
                Log.d(TAG, "Setting bitrate mode to constant bitrate");
                codecFormat.setInteger(MediaFormat.KEY_BITRATE_MODE,
                        MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
            } else { // assume VBR
                Log.d(TAG, "Setting bitrate mode to variable bitrate");
                codecFormat.setInteger(MediaFormat.KEY_BITRATE_MODE,
                        MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
            }
            // Calculate the bitrate based on image dimension, max compression ratio and quality.
            // Note that we set the frame rate to the number of tiles, so the bitrate would be the
            // intended bits for one image. (1.5 bytes/pixel for YUV420, 8 bits/byte.)
            int bitrate = (int) (width * height * 1.5 * 8 * MAX_COMPRESS_RATIO * quality / 100.0f);
            codecFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        }

        // Callback must be installed before configure() for async mode.
        mEncoder.setCallback(new EncoderCallback(), mHandler);
        mEncoder.configure(codecFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

        if (useSurfaceInternally) {
            mEncoderSurface = mEncoder.createInputSurface();

            boolean copyTiles = (mNumTiles > 1);
            mEOSTracker = new SurfaceEOSTracker(copyTiles);

            if (inputMode == INPUT_MODE_SURFACE) {
                if (copyTiles) {
                    mEncoderEglSurface = new EglWindowSurface(mEncoderSurface);
                    mEncoderEglSurface.makeCurrent();

                    // NOTE(review): within this branch inputMode is always
                    // INPUT_MODE_SURFACE, so this ternary always selects
                    // TEXTURE_EXT and the inner inputMode check below is always
                    // true — looks like leftover from a shared code path; confirm.
                    mRectBlt = new EglRectBlt(
                            new Texture2dProgram((inputMode == INPUT_MODE_BITMAP)
                                    ? Texture2dProgram.TEXTURE_2D
                                    : Texture2dProgram.TEXTURE_EXT),
                            mWidth, mHeight);

                    mTextureId = mRectBlt.createTextureObject();

                    if (inputMode == INPUT_MODE_SURFACE) {
                        // use single buffer mode to block on input
                        mInputTexture = new SurfaceTexture(mTextureId, true);
                        mInputTexture.setOnFrameAvailableListener(this);
                        mInputTexture.setDefaultBufferSize(mWidth, mHeight);
                        mInputSurface = new Surface(mInputTexture);
                    }

                    // make uncurrent since onFrameAvailable could be called on arbitrary thread.
                    // making the context current on a different thread will cause error.
                    mEncoderEglSurface.makeUnCurrent();
                } else {
                    // Single tile: hand the encoder's input surface straight to the client.
                    mInputSurface = mEncoderSurface;
                }
            }
        } else {
            // Buffer mode: pre-allocate the YUV420 staging pool (1.5 bytes/pixel).
            for (int i = 0; i < INPUT_BUFFER_POOL_SIZE; i++) {
                mEmptyBuffers.add(ByteBuffer.allocateDirect(mWidth * mHeight * 3 / 2));
            }
        }

        mDstRect = new Rect(0, 0, mGridWidth, mGridHeight);
        mSrcRect = new Rect();
    }

    /**
     * SurfaceTexture frame callback: copies each new client frame into the
     * encoder surface, one GL blit + swap per tile. May be invoked on an
     * arbitrary thread, hence the makeCurrent/makeUnCurrent bracketing.
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this) {
            // Already torn down (stopInternal ran); drop the frame.
            if (mEncoderEglSurface == null) {
                return;
            }

            mEncoderEglSurface.makeCurrent();

            surfaceTexture.updateTexImage();
            surfaceTexture.getTransformMatrix(mTmpMatrix);

            long timestampNs = surfaceTexture.getTimestamp();

            if (DEBUG) Log.d(TAG, "onFrameAvailable: timestampUs " + (timestampNs / 1000));

            // Skip frames that arrive after the requested end-of-input timestamp.
            boolean takeFrame = mEOSTracker.updateLastInputAndEncoderTime(timestampNs,
                    computePresentationTime(mInputIndex + mNumTiles - 1));

            if (takeFrame) {
                // Copies from surface texture to encoder inputs using GL.
                GLES20.glViewport(0, 0, mGridWidth, mGridHeight);

                for (int row = 0; row < mGridRows; row++) {
                    for (int col = 0; col < mGridCols; col++) {
                        int left = col * mGridWidth;
                        int top = row * mGridHeight;
                        mSrcRect.set(left, top, left + mGridWidth, top + mGridHeight);
                        mRectBlt.copyRect(mTextureId, mTmpMatrix, mSrcRect);
                        // setPresentationTime expects nanoseconds.
                        mEncoderEglSurface.setPresentationTime(
                                1000 * computePresentationTime(mInputIndex++));
                        mEncoderEglSurface.swapBuffers();
                    }
                }
            }

            // Release the (single-buffer-mode) image so the producer can render the next frame.
            surfaceTexture.releaseTexImage();

            // make uncurrent since the onFrameAvailable could be called on arbitrary thread.
            // making the context current on a different thread will cause error.
            mEncoderEglSurface.makeUnCurrent();
        }
    }

    /**
     * Start the encoding process.
     */
    public void start() {
        mEncoder.start();
    }

    /**
     * Add one YUV buffer to be encoded. This might block if the encoder can't process the input
     * buffers fast enough.
     *
     * After the call returns, the client can reuse the data array.
     *
     * @param format The YUV format as defined in {@link android.graphics.ImageFormat}, currently
     *               only support YUV_420_888.
     *
     * @param data byte array containing the YUV data. If the format has more than one planes,
     *             they must be concatenated.
     * @throws IllegalStateException if not in buffer input mode.
     * @throws IllegalArgumentException if data is null or not exactly one YUV420 image.
     */
    public void addYuvBuffer(int format, @NonNull byte[] data) {
        if (mInputMode != INPUT_MODE_BUFFER) {
            throw new IllegalStateException(
                    "addYuvBuffer is only allowed in buffer input mode");
        }
        if (data == null || data.length != mWidth * mHeight * 3 / 2) {
            throw new IllegalArgumentException("invalid data");
        }
        addYuvBufferInternal(data);
    }

    /**
     * Retrieves the input surface for encoding.
     *
     * Will only return valid value if configured to use surface input.
     *
     * @throws IllegalStateException if not in surface input mode.
     */
    public @NonNull Surface getInputSurface() {
        if (mInputMode != INPUT_MODE_SURFACE) {
            throw new IllegalStateException(
                    "getInputSurface is only allowed in surface input mode");
        }
        return mInputSurface;
    }

    /**
     * Sets the timestamp (in nano seconds) of the last input frame to encode. Frames with
     * timestamps larger than the specified value will not be encoded. However, if a frame
     * already started encoding when this is set, all tiles within that frame will be encoded.
     *
     * This method only applies when surface is used.
     *
     * @throws IllegalStateException if not in surface input mode.
     */
    public void setEndOfInputStreamTimestamp(long timestampNs) {
        if (mInputMode != INPUT_MODE_SURFACE) {
            throw new IllegalStateException(
                    "setEndOfInputStreamTimestamp is only allowed in surface input mode");
        }
        if (mEOSTracker != null) {
            mEOSTracker.updateInputEOSTime(timestampNs);
        }
    }

    /**
     * Adds one bitmap to be encoded. Draws the bitmap tile-by-tile onto the encoder's
     * input surface via a locked Canvas; each unlockCanvasAndPost submits one tile.
     *
     * @throws IllegalStateException if not in bitmap input mode.
     */
    public void addBitmap(@NonNull Bitmap bitmap) {
        if (mInputMode != INPUT_MODE_BITMAP) {
            throw new IllegalStateException("addBitmap is only allowed in bitmap input mode");
        }

        // Skip the bitmap entirely if EOS was already requested (stopAsync sets EOS time 0).
        boolean takeFrame = mEOSTracker.updateLastInputAndEncoderTime(
                computePresentationTime(mInputIndex),
                computePresentationTime(mInputIndex + mNumTiles - 1));

        if (!takeFrame) return;

        synchronized (this) {
            for (int row = 0; row < mGridRows; row++) {
                for (int col = 0; col < mGridCols; col++) {
                    int left = col * mGridWidth;
                    int top = row * mGridHeight;
                    mSrcRect.set(left, top, left + mGridWidth, top + mGridHeight);
                    Canvas canvas = mEncoderSurface.lockCanvas(null);
                    canvas.drawBitmap(bitmap, mSrcRect, mDstRect, null);
                    mEncoderSurface.unlockCanvasAndPost(canvas);
                }
            }
        }
    }

    /**
     * Sends input EOS to the encoder.
     * Result will be notified asynchronously via
     * {@link Callback#onComplete(HeifEncoder)} if encoder reaches EOS without error, or
     * {@link Callback#onError(HeifEncoder, CodecException)} otherwise.
     */
    public void stopAsync() {
        if (mInputMode == INPUT_MODE_BITMAP) {
            // here we simply set the EOS timestamp to 0, so that the cut off will be the last
            // bitmap ever added.
            mEOSTracker.updateInputEOSTime(0);
        } else if (mInputMode == INPUT_MODE_BUFFER) {
            // A null buffer becomes a 0-length input, which maybeCopyOneTileYUV treats as EOS.
            addYuvBufferInternal(null);
        }
    }

    /**
     * Generates the presentation time for input frame N, in microseconds.
     * The timestamp advances 1 sec for every whole frame.
     */
    private long computePresentationTime(int frameIndex) {
        // The constant 132 offset mirrors the platform's convention of starting
        // timestamps slightly above zero.
        return 132 + (long) frameIndex * 1000000 / mNumTiles;
    }

    /**
     * Obtains one empty input buffer and copies the data into it. Before input
     * EOS is sent, this would block until the data is copied. After input EOS
     * is sent, this would return immediately.
     *
     * @param data the YUV data to encode, or null to signal input EOS.
     */
    private void addYuvBufferInternal(@Nullable byte[] data) {
        ByteBuffer buffer = acquireEmptyBuffer();
        if (buffer == null) {
            // Already at EOS; silently drop.
            return;
        }
        buffer.clear();
        if (data != null) {
            buffer.put(data);
        }
        buffer.flip();
        synchronized (mFilledBuffers) {
            mFilledBuffers.add(buffer);
        }
        // Hop to the codec handler thread to pair filled buffers with codec input slots.
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                maybeCopyOneTileYUV();
            }
        });
    }

    /**
     * Routine to copy one tile if we have both input and codec buffer available.
     *
     * Must be called on the handler looper that also handles the MediaCodec callback.
     */
    private void maybeCopyOneTileYUV() {
        ByteBuffer currentBuffer;
        while ((currentBuffer = getCurrentBuffer()) != null && !mCodecInputBuffers.isEmpty()) {
            int index = mCodecInputBuffers.remove(0);

            // 0-length input means EOS. Only valid at the start of an image
            // (tile index is a multiple of mNumTiles), never mid-image.
            boolean inputEOS = (mInputIndex % mNumTiles == 0) && (currentBuffer.remaining() == 0);

            if (!inputEOS) {
                Image image = mEncoder.getInputImage(index);
                // Walk tiles row-major: column advances fastest with mInputIndex.
                int left = mGridWidth * (mInputIndex % mGridCols);
                int top = mGridHeight * (mInputIndex / mGridCols % mGridRows);
                mSrcRect.set(left, top, left + mGridWidth, top + mGridHeight);
                copyOneTileYUV(currentBuffer, image, mWidth, mHeight, mSrcRect, mDstRect);
            }

            mEncoder.queueInputBuffer(index, 0,
                    inputEOS ? 0 : mEncoder.getInputBuffer(index).capacity(),
                    computePresentationTime(mInputIndex++),
                    inputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);

            // Recycle the source buffer once the whole image (all tiles) is consumed.
            if (inputEOS || mInputIndex % mNumTiles == 0) {
                returnEmptyBufferAndNotify(inputEOS);
            }
        }
    }

    /**
     * Copies from a rect from src buffer to dst image.
     * TODO: This will be replaced by JNI.
     *
     * The source is assumed to be a packed YUV420 image (Y plane followed by
     * U and V planes at quarter size each); the destination is a flexible
     * YUV420 {@link Image} whose plane strides are honored.
     *
     * @throws IllegalArgumentException if the rects differ in size or any edge
     *         is not 2-pixel aligned (required for the subsampled chroma planes).
     */
    private static void copyOneTileYUV(
            ByteBuffer srcBuffer, Image dstImage,
            int srcWidth, int srcHeight,
            Rect srcRect, Rect dstRect) {
        if (srcRect.width() != dstRect.width() || srcRect.height() != dstRect.height()) {
            throw new IllegalArgumentException("src and dst rect size are different!");
        }
        if (srcWidth % 2 != 0 || srcHeight % 2 != 0 ||
                srcRect.left % 2 != 0 || srcRect.top % 2 != 0 ||
                srcRect.right % 2 != 0 || srcRect.bottom % 2 != 0 ||
                dstRect.left % 2 != 0 || dstRect.top % 2 != 0 ||
                dstRect.right % 2 != 0 || dstRect.bottom % 2 != 0) {
            throw new IllegalArgumentException("src or dst are not aligned!");
        }

        Image.Plane[] planes = dstImage.getPlanes();
        for (int n = 0; n < planes.length; n++) {
            ByteBuffer dstBuffer = planes[n].getBuffer();
            int colStride = planes[n].getPixelStride();
            // Clamp to the source image for partial edge tiles.
            int copyWidth = Math.min(srcRect.width(), srcWidth - srcRect.left);
            int copyHeight = Math.min(srcRect.height(), srcHeight - srcRect.top);
            int srcPlanePos = 0, div = 1;
            if (n > 0) {
                // Chroma planes: half resolution; U starts at W*H, V at W*H*5/4.
                div = 2;
                srcPlanePos = srcWidth * srcHeight * (n + 3) / 4;
            }
            for (int i = 0; i < copyHeight / div; i++) {
                srcBuffer.position(srcPlanePos +
                        (i + srcRect.top / div) * srcWidth / div + srcRect.left / div);
                dstBuffer.position((i + dstRect.top / div) * planes[n].getRowStride()
                        + dstRect.left * colStride / div);

                // Byte-by-byte copy honoring the destination pixel stride.
                for (int j = 0; j < copyWidth / div; j++) {
                    dstBuffer.put(srcBuffer.get());
                    if (colStride > 1 && j != copyWidth / div - 1) {
                        dstBuffer.position(dstBuffer.position() + colStride - 1);
                    }
                }
            }
        }
    }

    /**
     * Blocks until an empty pool buffer is available, or returns null once
     * input EOS has been reached (also the teardown unblock path).
     */
    private ByteBuffer acquireEmptyBuffer() {
        synchronized (mEmptyBuffers) {
            // wait for an empty input buffer first
            while (!mInputEOS && mEmptyBuffers.isEmpty()) {
                try {
                    mEmptyBuffers.wait();
                } catch (InterruptedException e) {}
                // NOTE(review): InterruptedException is swallowed and the
                // interrupt status is not restored — consider
                // Thread.currentThread().interrupt(); confirm callers' expectations.
            }

            // if already EOS, return null to stop further encoding.
            return mInputEOS ? null : mEmptyBuffers.remove(0);
        }
    }

    /**
     * Routine to get the current input buffer to copy from.
     * Only called on callback handler thread.
     */
    private ByteBuffer getCurrentBuffer() {
        if (!mInputEOS && mCurrentBuffer == null) {
            synchronized (mFilledBuffers) {
                mCurrentBuffer = mFilledBuffers.isEmpty() ?
                        null : mFilledBuffers.remove(0);
            }
        }
        return mInputEOS ? null : mCurrentBuffer;
    }

    /**
     * Routine to put the consumed input buffer back into the empty buffer pool.
     * Only called on callback handler thread.
     */
    private void returnEmptyBufferAndNotify(boolean inputEOS) {
        synchronized (mEmptyBuffers) {
            mInputEOS |= inputEOS;
            mEmptyBuffers.add(mCurrentBuffer);
            // Wake any producer blocked in acquireEmptyBuffer().
            mEmptyBuffers.notifyAll();
        }
        mCurrentBuffer = null;
    }

    /**
     * Routine to release all resources. Must be run on the same looper that
     * handles the MediaCodec callbacks.
655 */ 656 private void stopInternal() { 657 if (DEBUG) Log.d(TAG, "stopInternal"); 658 659 // after start, mEncoder is only accessed on handler, so no need to sync 660 if (mEncoder != null) { 661 mEncoder.stop(); 662 mEncoder.release(); 663 mEncoder = null; 664 } 665 666 // unblock the addBuffer() if we're tearing down before EOS is sent. 667 synchronized (mEmptyBuffers) { 668 mInputEOS = true; 669 mEmptyBuffers.notifyAll(); 670 } 671 672 synchronized(this) { 673 if (mRectBlt != null) { 674 mRectBlt.release(false); 675 mRectBlt = null; 676 } 677 678 if (mEncoderEglSurface != null) { 679 // Note that this frees mEncoderSurface too. If mEncoderEglSurface is not 680 // there, client is responsible to release the input surface it got from us, 681 // we don't release mEncoderSurface here. 682 mEncoderEglSurface.release(); 683 mEncoderEglSurface = null; 684 } 685 686 if (mInputTexture != null) { 687 mInputTexture.release(); 688 mInputTexture = null; 689 } 690 } 691 } 692 693 /** 694 * This class handles EOS for surface or bitmap inputs. 695 * 696 * When encoding from surface or bitmap, we can't call {@link MediaCodec#signalEndOfInputStream()} 697 * immediately after input is drawn, since this could drop all pending frames in the 698 * buffer queue. When there are tiles, this could leave us a partially encoded image. 699 * 700 * So here we track the EOS status by timestamps, and only signal EOS to the encoder 701 * when we collected all images we need. 702 * 703 * Since this is updated from multiple threads ({@link #setEndOfInputStreamTimestamp(long)}, 704 * {@link EncoderCallback#onOutputBufferAvailable(MediaCodec, int, BufferInfo)}, 705 * {@link #addBitmap(Bitmap)} and {@link #onFrameAvailable(SurfaceTexture)}), it must be fully 706 * synchronized. 707 * 708 * Note that when buffer input is used, the EOS flag is set in 709 * {@link EncoderCallback#onInputBufferAvailable(MediaCodec, int)} and this class is not used. 
710 */ 711 private class SurfaceEOSTracker { 712 private static final boolean DEBUG_EOS = false; 713 714 final boolean mCopyTiles; 715 long mInputEOSTimeNs = -1; 716 long mLastInputTimeNs = -1; 717 long mEncoderEOSTimeUs = -1; 718 long mLastEncoderTimeUs = -1; 719 long mLastOutputTimeUs = -1; 720 boolean mSignaled; 721 722 SurfaceEOSTracker(boolean copyTiles) { 723 mCopyTiles = copyTiles; 724 } 725 726 synchronized void updateInputEOSTime(long timestampNs) { 727 if (DEBUG_EOS) Log.d(TAG, "updateInputEOSTime: " + timestampNs); 728 729 if (mCopyTiles) { 730 if (mInputEOSTimeNs < 0) { 731 mInputEOSTimeNs = timestampNs; 732 } 733 } else { 734 if (mEncoderEOSTimeUs < 0) { 735 mEncoderEOSTimeUs = timestampNs / 1000; 736 } 737 } 738 updateEOSLocked(); 739 } 740 741 synchronized boolean updateLastInputAndEncoderTime(long inputTimeNs, long encoderTimeUs) { 742 if (DEBUG_EOS) Log.d(TAG, 743 "updateLastInputAndEncoderTime: " + inputTimeNs + ", " + encoderTimeUs); 744 745 boolean shouldTakeFrame = mInputEOSTimeNs < 0 || inputTimeNs <= mInputEOSTimeNs; 746 if (shouldTakeFrame) { 747 mLastEncoderTimeUs = encoderTimeUs; 748 } 749 mLastInputTimeNs = inputTimeNs; 750 updateEOSLocked(); 751 return shouldTakeFrame; 752 } 753 754 synchronized void updateLastOutputTime(long outputTimeUs) { 755 if (DEBUG_EOS) Log.d(TAG, "updateLastOutputTime: " + outputTimeUs); 756 757 mLastOutputTimeUs = outputTimeUs; 758 updateEOSLocked(); 759 } 760 761 private void updateEOSLocked() { 762 if (mSignaled) { 763 return; 764 } 765 if (mEncoderEOSTimeUs < 0) { 766 if (mInputEOSTimeNs >= 0 && mLastInputTimeNs >= mInputEOSTimeNs) { 767 if (mLastEncoderTimeUs < 0) { 768 doSignalEOSLocked(); 769 return; 770 } 771 // mEncoderEOSTimeUs tracks the timestamp of the last output buffer we 772 // will wait for. When that buffer arrives, encoder will be signalled EOS. 
773 mEncoderEOSTimeUs = mLastEncoderTimeUs; 774 if (DEBUG_EOS) Log.d(TAG, 775 "updateEOSLocked: mEncoderEOSTimeUs " + mEncoderEOSTimeUs); 776 } 777 } 778 if (mEncoderEOSTimeUs >= 0 && mEncoderEOSTimeUs <= mLastOutputTimeUs) { 779 doSignalEOSLocked(); 780 } 781 } 782 783 private void doSignalEOSLocked() { 784 if (DEBUG_EOS) Log.d(TAG, "doSignalEOSLocked"); 785 786 mHandler.post(new Runnable() { 787 @Override public void run() { 788 if (mEncoder != null) { 789 mEncoder.signalEndOfInputStream(); 790 } 791 } 792 }); 793 794 mSignaled = true; 795 } 796 } 797 798 /** 799 * MediaCodec callback for HEVC encoding. 800 */ 801 private class EncoderCallback extends MediaCodec.Callback { 802 private boolean mOutputEOS; 803 804 @Override 805 public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { 806 if (codec != mEncoder) return; 807 808 if (DEBUG) Log.d(TAG, "onOutputFormatChanged: " + format); 809 810 if (!MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC.equals( 811 format.getString(MediaFormat.KEY_MIME))) { 812 format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC); 813 format.setInteger(MediaFormat.KEY_WIDTH, mWidth); 814 format.setInteger(MediaFormat.KEY_HEIGHT, mHeight); 815 816 if (mNumTiles > 1) { 817 format.setInteger(MediaFormat.KEY_TILE_WIDTH, mGridWidth); 818 format.setInteger(MediaFormat.KEY_TILE_HEIGHT, mGridHeight); 819 format.setInteger(MediaFormat.KEY_GRID_ROWS, mGridRows); 820 format.setInteger(MediaFormat.KEY_GRID_COLUMNS, mGridCols); 821 } 822 } 823 824 mCallback.onOutputFormatChanged(HeifEncoder.this, format); 825 } 826 827 @Override 828 public void onInputBufferAvailable(MediaCodec codec, int index) { 829 if (codec != mEncoder || mInputEOS) return; 830 831 if (DEBUG) Log.d(TAG, "onInputBufferAvailable: " + index); 832 mCodecInputBuffers.add(index); 833 maybeCopyOneTileYUV(); 834 } 835 836 @Override 837 public void onOutputBufferAvailable(MediaCodec codec, int index, BufferInfo info) { 838 if (codec != mEncoder || 
mOutputEOS) return; 839 840 if (DEBUG) { 841 Log.d(TAG, "onOutputBufferAvailable: " + index 842 + ", time " + info.presentationTimeUs 843 + ", size " + info.size 844 + ", flags " + info.flags); 845 } 846 847 if ((info.size > 0) && ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0)) { 848 ByteBuffer outputBuffer = codec.getOutputBuffer(index); 849 850 // reset position as addBuffer() modifies it 851 outputBuffer.position(info.offset); 852 outputBuffer.limit(info.offset + info.size); 853 854 if (mEOSTracker != null) { 855 mEOSTracker.updateLastOutputTime(info.presentationTimeUs); 856 } 857 858 mCallback.onDrainOutputBuffer(HeifEncoder.this, outputBuffer); 859 } 860 861 mOutputEOS |= ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0); 862 863 codec.releaseOutputBuffer(index, false); 864 865 if (mOutputEOS) { 866 stopAndNotify(null); 867 } 868 } 869 870 @Override 871 public void onError(MediaCodec codec, CodecException e) { 872 if (codec != mEncoder) return; 873 874 Log.e(TAG, "onError: " + e); 875 stopAndNotify(e); 876 } 877 878 private void stopAndNotify(@Nullable CodecException e) { 879 stopInternal(); 880 if (e == null) { 881 mCallback.onComplete(HeifEncoder.this); 882 } else { 883 mCallback.onError(HeifEncoder.this, e); 884 } 885 } 886 } 887 888 @Override 889 public void close() { 890 // unblock the addBuffer() if we're tearing down before EOS is sent. 891 synchronized (mEmptyBuffers) { 892 mInputEOS = true; 893 mEmptyBuffers.notifyAll(); 894 } 895 896 mHandler.postAtFrontOfQueue(new Runnable() { 897 @Override 898 public void run() { 899 stopInternal(); 900 } 901 }); 902 } 903}