// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;

import java.nio.ByteBuffer;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

/**
 * A wrapper of the MediaCodec class to facilitate exception capturing and
 * audio rendering.
 *
 * All entry points annotated with {@code @CalledByNative} are invoked from the
 * native media pipeline; their names, signatures, and return codes must stay in
 * sync with media_codec_bridge.h / media_codec_bridge.cc.
 */
@JNINamespace("media")
class MediaCodecBridge {

    private static final String TAG = "MediaCodecBridge";

    // Error code for MediaCodecBridge. Keep this value in sync with
    // INFO_MEDIA_CODEC_ERROR in media_codec_bridge.h.
    private static final int MEDIA_CODEC_OK = 0;
    private static final int MEDIA_CODEC_ERROR = -1000;

    // After a flush(), dequeueOutputBuffer() can often produce empty presentation timestamps
    // for several frames. As a result, the player may find that the time does not increase
    // after decoding a frame. To detect this, we check whether the presentation timestamp from
    // dequeueOutputBuffer() is larger than input_timestamp - MAX_PRESENTATION_TIMESTAMP_SHIFT_US
    // after a flush. And we set the presentation timestamp from dequeueOutputBuffer() to be
    // non-decreasing for the remaining frames.
    private static final long MAX_PRESENTATION_TIMESTAMP_SHIFT_US = 100000;

    // Cached buffer arrays; refreshed by start() and getOutputBuffers().
    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;

    private MediaCodec mMediaCodec;
    // Only created when configureAudio() is asked to play audio; may be null.
    private AudioTrack mAudioTrack;
    // True immediately after construction or flush(); cleared on the first
    // queued input so the timestamp correction runs exactly once per flush.
    private boolean mFlushed;
    private long mLastPresentationTimeUs;

    /**
     * Immutable result of a dequeueOutputBuffer() call, read field-by-field
     * from native code via the accessors below.
     */
    private static class DequeueOutputResult {
        private final int mIndex;
        private final int mFlags;
        private final int mOffset;
        private final long mPresentationTimeMicroseconds;
        private final int mNumBytes;

        private DequeueOutputResult(int index, int flags, int offset,
                long presentationTimeMicroseconds, int numBytes) {
            mIndex = index;
            mFlags = flags;
            mOffset = offset;
            mPresentationTimeMicroseconds = presentationTimeMicroseconds;
            mNumBytes = numBytes;
        }

        @CalledByNative("DequeueOutputResult")
        private int index() { return mIndex; }

        @CalledByNative("DequeueOutputResult")
        private int flags() { return mFlags; }

        @CalledByNative("DequeueOutputResult")
        private int offset() { return mOffset; }

        @CalledByNative("DequeueOutputResult")
        private long presentationTimeMicroseconds() { return mPresentationTimeMicroseconds; }

        @CalledByNative("DequeueOutputResult")
        private int numBytes() { return mNumBytes; }
    }

    private MediaCodecBridge(String mime) {
        mMediaCodec = MediaCodec.createDecoderByType(mime);
        mLastPresentationTimeUs = 0;
        mFlushed = true;
    }

    /** Creates a decoder bridge for the given MIME type. */
    @CalledByNative
    private static MediaCodecBridge create(String mime) {
        return new MediaCodecBridge(mime);
    }

    /** Releases the codec and, if present, the audio track. */
    @CalledByNative
    private void release() {
        mMediaCodec.release();
        if (mAudioTrack != null) {
            mAudioTrack.release();
        }
    }

    /** Starts the codec and caches its input buffers. */
    @CalledByNative
    private void start() {
        mMediaCodec.start();
        mInputBuffers = mMediaCodec.getInputBuffers();
    }

    /**
     * Dequeues an input buffer index, waiting up to timeoutUs.
     * Returns MEDIA_CODEC_ERROR if the codec is in an illegal state.
     */
    @CalledByNative
    private int dequeueInputBuffer(long timeoutUs) {
        try {
            return mMediaCodec.dequeueInputBuffer(timeoutUs);
        } catch (IllegalStateException e) {
            // Narrowed from a blanket catch (Exception): dequeueInputBuffer()
            // only throws IllegalStateException, and this matches the error
            // handling used by every other method in this class.
            Log.e(TAG, "Cannot dequeue Input buffer", e);
        }
        return MEDIA_CODEC_ERROR;
    }

    /**
     * Flushes codec (and audio track, if any) and arms the post-flush
     * timestamp correction. Returns MEDIA_CODEC_OK or MEDIA_CODEC_ERROR.
     */
    @CalledByNative
    private int flush() {
        try {
            mFlushed = true;
            if (mAudioTrack != null) {
                mAudioTrack.flush();
            }
            mMediaCodec.flush();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to flush MediaCodec", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }

    /** Stops the codec and pauses audio output, if any. */
    @CalledByNative
    private void stop() {
        mMediaCodec.stop();
        if (mAudioTrack != null) {
            mAudioTrack.pause();
        }
    }

    @CalledByNative
    private int getOutputHeight() {
        return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_HEIGHT);
    }

    @CalledByNative
    private int getOutputWidth() {
        return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_WIDTH);
    }

    @CalledByNative
    private ByteBuffer getInputBuffer(int index) {
        return mInputBuffers[index];
    }

    @CalledByNative
    private ByteBuffer getOutputBuffer(int index) {
        return mOutputBuffers[index];
    }

    /**
     * Submits a filled input buffer to the codec. Failures are logged only;
     * the void JNI signature provides no error channel to native code.
     */
    @CalledByNative
    private void queueInputBuffer(
            int index, int offset, int size, long presentationTimeUs, int flags) {
        resetLastPresentationTimeIfNeeded(presentationTimeUs);
        try {
            mMediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to queue input buffer", e);
        }
    }

    /**
     * Submits an AES-CTR encrypted input buffer described by the subsample
     * layout (clear/encrypted byte counts per subsample).
     */
    @CalledByNative
    private void queueSecureInputBuffer(
            int index, int offset, byte[] iv, byte[] keyId, int[] numBytesOfClearData,
            int[] numBytesOfEncryptedData, int numSubSamples, long presentationTimeUs) {
        resetLastPresentationTimeIfNeeded(presentationTimeUs);
        try {
            MediaCodec.CryptoInfo cryptoInfo = new MediaCodec.CryptoInfo();
            cryptoInfo.set(numSubSamples, numBytesOfClearData, numBytesOfEncryptedData,
                    keyId, iv, MediaCodec.CRYPTO_MODE_AES_CTR);
            mMediaCodec.queueSecureInputBuffer(index, offset, cryptoInfo, presentationTimeUs, 0);
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to queue secure input buffer", e);
        }
    }

    @CalledByNative
    private void releaseOutputBuffer(int index, boolean render) {
        mMediaCodec.releaseOutputBuffer(index, render);
    }

    /** Refreshes the cached output buffer array (after INFO_OUTPUT_BUFFERS_CHANGED). */
    @CalledByNative
    private void getOutputBuffers() {
        mOutputBuffers = mMediaCodec.getOutputBuffers();
    }

    /**
     * Dequeues an output buffer, clamping its presentation timestamp to be
     * non-decreasing (see MAX_PRESENTATION_TIMESTAMP_SHIFT_US above). On
     * failure the result carries index == MEDIA_CODEC_ERROR and zeroed info.
     */
    @CalledByNative
    private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int index = MEDIA_CODEC_ERROR;
        try {
            index = mMediaCodec.dequeueOutputBuffer(info, timeoutUs);
            if (info.presentationTimeUs < mLastPresentationTimeUs) {
                // TODO(qinmin): return a special code through DequeueOutputResult
                // to notify the native code that the frame has a wrong presentation
                // timestamp and should be skipped.
                info.presentationTimeUs = mLastPresentationTimeUs;
            }
            mLastPresentationTimeUs = info.presentationTimeUs;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot dequeue output buffer", e);
        }
        return new DequeueOutputResult(
                index, info.flags, info.offset, info.presentationTimeUs, info.size);
    }

    /** Configures the codec for video decoding. Returns true on success. */
    @CalledByNative
    private boolean configureVideo(MediaFormat format, Surface surface, MediaCrypto crypto,
            int flags) {
        try {
            mMediaCodec.configure(format, surface, crypto, flags);
            return true;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot configure the video codec", e);
        }
        return false;
    }

    @CalledByNative
    private static MediaFormat createAudioFormat(String mime, int sampleRate, int channelCount) {
        return MediaFormat.createAudioFormat(mime, sampleRate, channelCount);
    }

    @CalledByNative
    private static MediaFormat createVideoFormat(String mime, int width, int height) {
        return MediaFormat.createVideoFormat(mime, width, height);
    }

    /** Attaches codec-specific data ("csd-0"/"csd-1"); other indices are ignored. */
    @CalledByNative
    private static void setCodecSpecificData(MediaFormat format, int index, byte[] bytes) {
        String name = null;
        if (index == 0) {
            name = "csd-0";
        } else if (index == 1) {
            name = "csd-1";
        }
        if (name != null) {
            format.setByteBuffer(name, ByteBuffer.wrap(bytes));
        }
    }

    @CalledByNative
    private static void setFrameHasADTSHeader(MediaFormat format) {
        format.setInteger(MediaFormat.KEY_IS_ADTS, 1);
    }

    /**
     * Configures the codec for audio decoding and, when playAudio is set,
     * creates the AudioTrack used by playOutputBuffer(). Returns true on
     * success.
     *
     * NOTE(review): getMinBufferSize() can return a negative error code and
     * the AudioTrack constructor throws IllegalArgumentException, which this
     * method does not catch — confirm the native caller tolerates that.
     */
    @CalledByNative
    private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
            boolean playAudio) {
        try {
            mMediaCodec.configure(format, null, crypto, flags);
            if (playAudio) {
                int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                int channelConfig = (channelCount == 1) ? AudioFormat.CHANNEL_OUT_MONO :
                        AudioFormat.CHANNEL_OUT_STEREO;
                // Using 16bit PCM for output. Keep this value in sync with
                // kBytesPerAudioOutputSample in media_codec_bridge.cc.
                int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT);
                mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
            }
            return true;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot configure the audio codec", e);
        }
        return false;
    }

    /** Writes decoded PCM to the audio track, starting playback if needed. */
    @CalledByNative
    private void playOutputBuffer(byte[] buf) {
        if (mAudioTrack != null) {
            if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
                mAudioTrack.play();
            }
            int size = mAudioTrack.write(buf, 0, buf.length);
            if (buf.length != size) {
                Log.i(TAG, "Failed to send all data to audio output, expected size: " +
                        buf.length + ", actual size: " + size);
            }
        }
    }

    @CalledByNative
    private void setVolume(double volume) {
        if (mAudioTrack != null) {
            mAudioTrack.setStereoVolume((float) volume, (float) volume);
        }
    }

    // After a flush, rebase the last-seen timestamp just below the incoming
    // input timestamp so the clamp in dequeueOutputBuffer() tolerates a shift
    // of up to MAX_PRESENTATION_TIMESTAMP_SHIFT_US. Runs once per flush.
    private void resetLastPresentationTimeIfNeeded(long presentationTimeUs) {
        if (mFlushed) {
            mLastPresentationTimeUs =
                    Math.max(presentationTimeUs - MAX_PRESENTATION_TIMESTAMP_SHIFT_US, 0);
            mFlushed = false;
        }
    }
}