/*
 * libjingle
 * Copyright 2013 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.view.Surface;

import org.webrtc.Logging;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
// This class is an implementation detail of the Java PeerConnection API.
@TargetApi(19)
@SuppressWarnings("deprecation")
public class MediaCodecVideoEncoder {
  // This class is constructed, operated, and destroyed by its C++ incarnation,
  // so the class and its methods have non-public visibility.  The API this
  // class exposes aims to mimic the webrtc::VideoEncoder API as closely as
  // possible to minimize the amount of translation work necessary.
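  //
  // Illustrative call sequence (a sketch of how the native side is expected to
  // drive this class; the exact order is decided by the C++ code, and variable
  // names below are placeholders, not part of this class):
  //
  //   initEncode(type, width, height, kbps, fps, sharedContext);
  //   // Byte-buffer mode (sharedContext == null):
  //   ByteBuffer[] inputs = getInputBuffers();
  //   int index = dequeueInputBuffer();   // fill inputs[index], then:
  //   encodeBuffer(false, index, size, timestampUs);
  //   // Texture mode (sharedContext != null):
  //   encodeTexture(false, oesTextureId, transformMatrix, timestampUs);
  //   OutputBufferInfo out = dequeueOutputBuffer();
  //   releaseOutputBuffer(out.index);
  //   setRates(newKbps, fps);             // on bitrate updates
  //   release();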

  private static final String TAG = "MediaCodecVideoEncoder";

  // Tracks webrtc::VideoCodecType.
  public enum VideoCodecType {
    VIDEO_CODEC_VP8,
    VIDEO_CODEC_VP9,
    VIDEO_CODEC_H264
  }

  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
  private static final int DEQUEUE_TIMEOUT = 0;  // Non-blocking, no wait.
  // Active running encoder instance. Set in initEncode() (called from native code)
  // and reset to null in release() call.
  private static MediaCodecVideoEncoder runningInstance = null;
  private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
  private static int codecErrors = 0;

  private Thread mediaCodecThread;
  private MediaCodec mediaCodec;
  private ByteBuffer[] outputBuffers;
  private EglBase14 eglBase;
  private int width;
  private int height;
  private Surface inputSurface;
  private GlRectDrawer drawer;
  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
  private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
  private static final String H264_MIME_TYPE = "video/avc";
  // List of supported HW VP8 codecs.
  private static final String[] supportedVp8HwCodecPrefixes =
    {"OMX.qcom.", "OMX.Intel." };
  // List of supported HW VP9 encoders.
  private static final String[] supportedVp9HwCodecPrefixes =
    {"OMX.qcom."};
  // List of supported HW H.264 codecs.
  private static final String[] supportedH264HwCodecPrefixes =
    {"OMX.qcom." };
  // List of devices with poor H.264 encoder quality.
  private static final String[] H264_HW_EXCEPTION_MODELS = new String[] {
    // The HW H.264 encoder on the devices below has poor bitrate control - the
    // actual bitrate deviates a lot from the target value.
    "SAMSUNG-SGH-I337",
    "Nexus 7",
    "Nexus 4"
  };

  // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
  // in OMX_Video.h
  private static final int VIDEO_ControlRateVariable = 1;
  private static final int VIDEO_ControlRateConstant = 2;
  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
  private static final int
    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
  // Allowable color formats supported by codec - in order of preference.
  private static final int[] supportedColorList = {
    CodecCapabilities.COLOR_FormatYUV420Planar,
    CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
    CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
  };
  private static final int[] supportedSurfaceColorList = {
    CodecCapabilities.COLOR_FormatSurface
  };
  private VideoCodecType type;
  private int colorFormat;  // Used by native code.

  // SPS and PPS NALs (Config frame) for H.264.
  private ByteBuffer configData = null;

  // MediaCodec error handler - invoked when a critical error happens that may prevent
  // further use of the MediaCodec API. Currently this means that one of the MediaCodec
  // instances is hanging and can no longer be used.
  public static interface MediaCodecVideoEncoderErrorCallback {
    void onMediaCodecVideoEncoderCriticalError(int codecErrors);
  }

  public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
    Logging.d(TAG, "Set error callback");
    MediaCodecVideoEncoder.errorCallback = errorCallback;
  }
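
  // Illustrative registration of the error callback (a sketch; the handler body and
  // the idea of falling back to a software encoder are application-specific
  // assumptions, not something this class mandates):
  //
  //   MediaCodecVideoEncoder.setErrorCallback(
  //       new MediaCodecVideoEncoder.MediaCodecVideoEncoderErrorCallback() {
  //         @Override
  //         public void onMediaCodecVideoEncoderCriticalError(int codecErrors) {
  //           // E.g. disable HW encoding and recreate the peer connection.
  //         }
  //       });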

  // Helper struct for findHwEncoder() below.
  private static class EncoderProperties {
    public EncoderProperties(String codecName, int colorFormat) {
      this.codecName = codecName;
      this.colorFormat = colorFormat;
    }
    public final String codecName; // OpenMax component name for HW codec.
    public final int colorFormat;  // Color format supported by codec.
  }

  private static EncoderProperties findHwEncoder(
      String mime, String[] supportedHwCodecPrefixes, int[] colorList) {
    // MediaCodec.setParameters is missing for JB and below, so the bitrate
    // cannot be adjusted dynamically.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      return null;
    }

    // Check if the device is on the H.264 exception list.
    if (mime.equals(H264_MIME_TYPE)) {
      List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
      if (exceptionModels.contains(Build.MODEL)) {
        Logging.w(TAG, "Model: " + Build.MODEL + " has a blacklisted H.264 encoder.");
        return null;
      }
    }

    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
      if (!info.isEncoder()) {
        continue;
      }
      String name = null;
      for (String mimeType : info.getSupportedTypes()) {
        if (mimeType.equals(mime)) {
          name = info.getName();
          break;
        }
      }
      if (name == null) {
        continue;  // No HW support in this codec; try the next one.
      }
      Logging.v(TAG, "Found candidate encoder " + name);

      // Check if this is a supported HW encoder.
      boolean supportedCodec = false;
      for (String hwCodecPrefix : supportedHwCodecPrefixes) {
        if (name.startsWith(hwCodecPrefix)) {
          supportedCodec = true;
          break;
        }
      }
      if (!supportedCodec) {
        continue;
      }

      CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
      for (int colorFormat : capabilities.colorFormats) {
        Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
      }

      for (int supportedColorFormat : colorList) {
        for (int codecColorFormat : capabilities.colorFormats) {
          if (codecColorFormat == supportedColorFormat) {
            // Found supported HW encoder.
            Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name +
                ". Color: 0x" + Integer.toHexString(codecColorFormat));
            return new EncoderProperties(name, codecColorFormat);
          }
        }
      }
    }
    return null;  // No HW encoder.
  }

  public static boolean isVp8HwSupported() {
    return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedColorList) != null;
  }

  public static boolean isVp9HwSupported() {
    return findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedColorList) != null;
  }

  public static boolean isH264HwSupported() {
    return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedColorList) != null;
  }

  public static boolean isVp8HwSupportedUsingTextures() {
    return findHwEncoder(
        VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedSurfaceColorList) != null;
  }

  public static boolean isVp9HwSupportedUsingTextures() {
    return findHwEncoder(
        VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedSurfaceColorList) != null;
  }

  public static boolean isH264HwSupportedUsingTextures() {
    return findHwEncoder(
        H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedSurfaceColorList) != null;
  }

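  // Verify that the caller is on the thread that created the codec in initEncode();
  // MediaCodec instances must only be operated from a single thread.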
  private void checkOnMediaCodecThread() {
    if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
      throw new RuntimeException(
          "MediaCodecVideoEncoder previously operated on " + mediaCodecThread +
          " but is now called on " + Thread.currentThread());
    }
  }

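  // Log the stack trace of the thread that is currently operating the encoder.
  // Useful for diagnosing a MediaCodec call that appears to be stuck.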
  public static void printStackTrace() {
    if (runningInstance != null && runningInstance.mediaCodecThread != null) {
      StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
      if (mediaCodecStackTraces.length > 0) {
        Logging.d(TAG, "MediaCodecVideoEncoder stack trace:");
        for (StackTraceElement stackTrace : mediaCodecStackTraces) {
          Logging.d(TAG, stackTrace.toString());
        }
      }
    }
  }

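  // Wrapper around MediaCodec.createByCodecName() that returns null instead of
  // throwing on failure.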
  static MediaCodec createByCodecName(String codecName) {
    try {
      // In the Android L SDK this call can throw IOException, so catch a generic
      // Exception in order to work across SDK versions.
      return MediaCodec.createByCodecName(codecName);
    } catch (Exception e) {
      return null;
    }
  }

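  // Initialize the HW encoder for the given codec type, resolution, bitrate and frame
  // rate. If sharedContext is non-null the encoder is configured for texture (surface)
  // input, otherwise for byte-buffer input. Returns false if the codec cannot be
  // created or configured, and throws if no suitable HW encoder is found. Called from
  // native code.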
  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
      EglBase14.Context sharedContext) {
    final boolean useSurface = sharedContext != null;
    Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
        ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);

    this.width = width;
    this.height = height;
    if (mediaCodecThread != null) {
      throw new RuntimeException("Forgot to release()?");
    }
    EncoderProperties properties = null;
    String mime = null;
    int keyFrameIntervalSec = 0;
    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
      mime = VP8_MIME_TYPE;
      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes,
          useSurface ? supportedSurfaceColorList : supportedColorList);
      keyFrameIntervalSec = 100;
    } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
      mime = VP9_MIME_TYPE;
      properties = findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes,
          useSurface ? supportedSurfaceColorList : supportedColorList);
      keyFrameIntervalSec = 100;
    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
      mime = H264_MIME_TYPE;
      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes,
          useSurface ? supportedSurfaceColorList : supportedColorList);
      keyFrameIntervalSec = 20;
    }
    if (properties == null) {
      throw new RuntimeException("Can not find HW encoder for " + type);
    }
    runningInstance = this; // Encoder is now running and can be queried for stack traces.
    colorFormat = properties.colorFormat;
    Logging.d(TAG, "Color format: " + colorFormat);

    mediaCodecThread = Thread.currentThread();
    try {
      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
      format.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * kbps);
      format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
      format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
      Logging.d(TAG, "  Format: " + format);
      mediaCodec = createByCodecName(properties.codecName);
      this.type = type;
      if (mediaCodec == null) {
        Logging.e(TAG, "Can not create media encoder");
        return false;
      }
      mediaCodec.configure(
          format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

      if (useSurface) {
        eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
        // Create an input surface and keep a reference since we must release the surface when done.
        inputSurface = mediaCodec.createInputSurface();
        eglBase.createSurface(inputSurface);
        drawer = new GlRectDrawer();
      }
      mediaCodec.start();
      outputBuffers = mediaCodec.getOutputBuffers();
      Logging.d(TAG, "Output buffers: " + outputBuffers.length);

    } catch (IllegalStateException e) {
      Logging.e(TAG, "initEncode failed", e);
      return false;
    }
    return true;
  }

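  // Return the codec's input buffers so that native code can fill frames into them
  // directly.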
  ByteBuffer[] getInputBuffers() {
    ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
    Logging.d(TAG, "Input buffers: " + inputBuffers.length);
    return inputBuffers;
  }

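  // Queue the input buffer with the given index for encoding, optionally requesting a
  // key frame first. Returns false if the codec is no longer operable.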
  boolean encodeBuffer(
      boolean isKeyframe, int inputBuffer, int size,
      long presentationTimestampUs) {
    checkOnMediaCodecThread();
    try {
      if (isKeyframe) {
        // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
        // indicate this in queueInputBuffer() below and guarantee _this_ frame
        // be encoded as a key frame, but sadly that flag is ignored.  Instead,
        // we request a key frame "soon".
        Logging.d(TAG, "Sync frame request");
        Bundle b = new Bundle();
        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
        mediaCodec.setParameters(b);
      }
      mediaCodec.queueInputBuffer(
          inputBuffer, 0, size, presentationTimestampUs, 0);
      return true;
    }
    catch (IllegalStateException e) {
      Logging.e(TAG, "encodeBuffer failed", e);
      return false;
    }
  }

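  // Encode a frame in texture mode by drawing the OES texture into the codec's input
  // surface. Returns false if the encode step fails.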
  boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
      long presentationTimestampUs) {
    checkOnMediaCodecThread();
    try {
      if (isKeyframe) {
        Logging.d(TAG, "Sync frame request");
        Bundle b = new Bundle();
        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
        mediaCodec.setParameters(b);
      }
      eglBase.makeCurrent();
      // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
      // but it's a workaround for bug webrtc:5147.
      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
      drawer.drawOes(oesTextureId, transformationMatrix, 0, 0, width, height);
      eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
      return true;
    }
    catch (RuntimeException e) {
      Logging.e(TAG, "encodeTexture failed", e);
      return false;
    }
  }

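  // Stop and release the MediaCodec and any surface/EGL resources, invoking the error
  // callback if the release times out. Called from native code; must be invoked on the
  // same thread that called initEncode().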
  void release() {
    Logging.d(TAG, "Java releaseEncoder");
    checkOnMediaCodecThread();

    // Run MediaCodec stop() and release() on a separate thread since
    // MediaCodec.stop() may sometimes hang.
    final CountDownLatch releaseDone = new CountDownLatch(1);

    Runnable runMediaCodecRelease = new Runnable() {
      @Override
      public void run() {
        try {
          Logging.d(TAG, "Java releaseEncoder on release thread");
          mediaCodec.stop();
          mediaCodec.release();
          Logging.d(TAG, "Java releaseEncoder on release thread done");
        } catch (Exception e) {
          Logging.e(TAG, "Media encoder release failed", e);
        }
        releaseDone.countDown();
      }
    };
    new Thread(runMediaCodecRelease).start();

    if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
      Logging.e(TAG, "Media encoder release timeout");
      codecErrors++;
      if (errorCallback != null) {
        Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
        errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
      }
    }

    mediaCodec = null;
    mediaCodecThread = null;
    if (drawer != null) {
      drawer.release();
      drawer = null;
    }
    if (eglBase != null) {
      eglBase.release();
      eglBase = null;
    }
    if (inputSurface != null) {
      inputSurface.release();
      inputSurface = null;
    }
    runningInstance = null;
    Logging.d(TAG, "Java releaseEncoder done");
  }

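  // Dynamically adjust the encoder's target bitrate. Returns false if the codec is no
  // longer operable.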
  private boolean setRates(int kbps, int frameRateIgnored) {
    // frameRate argument is ignored - HW encoder is supposed to use
    // video frame timestamps for bit allocation.
    checkOnMediaCodecThread();
    Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + frameRateIgnored);
    try {
      Bundle params = new Bundle();
      params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, 1000 * kbps);
      mediaCodec.setParameters(params);
      return true;
    } catch (IllegalStateException e) {
      Logging.e(TAG, "setRates failed", e);
      return false;
    }
  }

  // Dequeue an input buffer and return its index, -1 if no input buffer is
  // available, or -2 if the codec is no longer operative.
  int dequeueInputBuffer() {
    checkOnMediaCodecThread();
    try {
      return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
    } catch (IllegalStateException e) {
      Logging.e(TAG, "dequeueInputBuffer failed", e);
      return -2;
    }
  }

  // Helper struct for dequeueOutputBuffer() below.
  static class OutputBufferInfo {
    public OutputBufferInfo(
        int index, ByteBuffer buffer,
        boolean isKeyFrame, long presentationTimestampUs) {
      this.index = index;
      this.buffer = buffer;
      this.isKeyFrame = isKeyFrame;
      this.presentationTimestampUs = presentationTimestampUs;
    }

    public final int index;
    public final ByteBuffer buffer;
    public final boolean isKeyFrame;
    public final long presentationTimestampUs;
  }

  // Dequeue and return an output buffer, or null if no output is ready.  Return
  // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
  OutputBufferInfo dequeueOutputBuffer() {
    checkOnMediaCodecThread();
    try {
      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
      int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
      // Check if this is a config frame and save the configuration data.
      if (result >= 0) {
        boolean isConfigFrame =
            (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
        if (isConfigFrame) {
          Logging.d(TAG, "Config frame generated. Offset: " + info.offset +
              ". Size: " + info.size);
          configData = ByteBuffer.allocateDirect(info.size);
          outputBuffers[result].position(info.offset);
          outputBuffers[result].limit(info.offset + info.size);
          configData.put(outputBuffers[result]);
          // Release buffer back.
          mediaCodec.releaseOutputBuffer(result, false);
          // Query next output.
          result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
        }
      }
      if (result >= 0) {
        // MediaCodec doesn't care about Buffer position/remaining/etc so we can
        // mess with them to get a slice and avoid having to pass extra
        // (BufferInfo-related) parameters back to C++.
        ByteBuffer outputBuffer = outputBuffers[result].duplicate();
        outputBuffer.position(info.offset);
        outputBuffer.limit(info.offset + info.size);
        // Check key frame flag.
        boolean isKeyFrame =
            (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
        if (isKeyFrame) {
          Logging.d(TAG, "Sync frame generated");
        }
        if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
          Logging.d(TAG, "Appending config frame of size " + configData.capacity() +
              " to output buffer with offset " + info.offset + ", size " +
              info.size);
          // For an H.264 key frame, append the SPS and PPS NALs at the start.
          ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
              configData.capacity() + info.size);
          configData.rewind();
          keyFrameBuffer.put(configData);
          keyFrameBuffer.put(outputBuffer);
          keyFrameBuffer.position(0);
          return new OutputBufferInfo(result, keyFrameBuffer,
              isKeyFrame, info.presentationTimeUs);
        } else {
          return new OutputBufferInfo(result, outputBuffer.slice(),
              isKeyFrame, info.presentationTimeUs);
        }
      } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        outputBuffers = mediaCodec.getOutputBuffers();
        return dequeueOutputBuffer();
      } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        return dequeueOutputBuffer();
      } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
        return null;
      }
      throw new RuntimeException("dequeueOutputBuffer: " + result);
    } catch (IllegalStateException e) {
      Logging.e(TAG, "dequeueOutputBuffer failed", e);
      return new OutputBufferInfo(-1, null, false, -1);
    }
  }

  // Release a dequeued output buffer back to the codec for re-use.  Return
  // false if the codec is no longer operable.
  boolean releaseOutputBuffer(int index) {
    checkOnMediaCodecThread();
    try {
      mediaCodec.releaseOutputBuffer(index, false);
      return true;
    } catch (IllegalStateException e) {
      Logging.e(TAG, "releaseOutputBuffer failed", e);
      return false;
    }
  }
}