/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.webrtcdemo;

import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceView;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.LinkedList;

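// Wraps an Android MediaCodec VP8 decoder and renders its output to a
// SurfaceView. A sketch of the expected call sequence, inferred from the
// entry points below: the native layer calls start(width, height) once, then
// pushBuffer(...) for every encoded frame, and finally dispose(); getView()
// exposes the SurfaceView for the UI.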
class MediaCodecVideoDecoder {
  public static final int DECODE = 0;
  private enum CodecName { ON2_VP8, GOOGLE_VPX, EXYNOX_VP8 }

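  // Soft assert: if |value| is false, logs |message| and pops up an error
  // dialog instead of throwing.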
  private void check(boolean value, String message) {
    if (value) {
      return;
    }
    Log.e("WEBRTC-CHECK", message);
    AlertDialog alertDialog = new AlertDialog.Builder(context).create();
    alertDialog.setTitle("WebRTC Error");
    alertDialog.setMessage(message);
    alertDialog.setButton(
        DialogInterface.BUTTON_POSITIVE,
        "OK",
        new DialogInterface.OnClickListener() {
          public void onClick(DialogInterface dialog, int which) {
            // Nothing to do; the dialog only reports the error.
          }
        });
    alertDialog.show();
  }

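  // An encoded frame received from the native layer, paired with the
  // timestamp (in microseconds) at which it should be rendered.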
  class Frame {
    public ByteBuffer buffer;
    public long timestampUs;

    Frame(ByteBuffer buffer, long timestampUs) {
      this.buffer = buffer;
      this.timestampUs = timestampUs;
    }
  }

  // This class enables decoding to be run on a separate thread.
  class DecodeHandler extends Handler {
    @Override
    public void handleMessage(Message msg) {
      // TODO(dwkang): figure out why exceptions just make this thread finish.
      try {
        switch (msg.what) {
          case DECODE:
            decodePendingBuffers();
            long delayMillis = 5;  // Don't busy wait.
            handler.sendMessageDelayed(
                handler.obtainMessage(DECODE), delayMillis);
            break;
          default:
            break;
        }
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  }

  private static String TAG;
  private Context context;
  private SurfaceView surfaceView;

  private DecodeHandler handler;
  private Thread looperThread;

  MediaCodec codec;
  MediaFormat format;

  // Buffers supplied by MediaCodec for pushing encoded data to and pulling
  // decoded data from.
  private ByteBuffer[] codecInputBuffers;
  private ByteBuffer[] codecOutputBuffers;

  // Frames from the native layer.
  private LinkedList<Frame> frameQueue;
  // Indexes to MediaCodec buffers.
  private LinkedList<Integer> availableInputBufferIndices;
  private LinkedList<Integer> availableOutputBufferIndices;
  private LinkedList<MediaCodec.BufferInfo> availableOutputBufferInfos;

  // Offset between system time and media time.
  private long deltaTimeUs;

  public MediaCodecVideoDecoder(Context context) {
    TAG = context.getString(R.string.tag);
    this.context = context;
    surfaceView = new SurfaceView(context);
    frameQueue = new LinkedList<Frame>();
    availableInputBufferIndices = new LinkedList<Integer>();
    availableOutputBufferIndices = new LinkedList<Integer>();
    availableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();
  }

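  // Stops and releases the underlying MediaCodec.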
  public void dispose() {
    codec.stop();
    codec.release();
  }

  // Return view that is written to by MediaCodec.
  public SurfaceView getView() { return surfaceView; }

  // Entry point from the native layer. Called when the class should be ready
  // to start receiving frames to decode.
  private boolean start(int width, int height) {
    deltaTimeUs = -1;
    if (!setCodecState(width, height, CodecName.ON2_VP8)) {
      return false;
    }
    startLooperThread();
    // The decoding must happen on the |looperThread| thread.
    handler.sendMessage(handler.obtainMessage(DECODE));
    return true;
  }

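  // Creates and configures a VP8 decoder for |width| x |height| frames,
  // rendering decoded output directly to |surfaceView|'s surface.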
  private boolean setCodecState(int width, int height, CodecName codecName) {
    // TODO(henrike): enable more than the ON2_VP8 codec.
    format = new MediaFormat();
    format.setInteger(MediaFormat.KEY_WIDTH, width);
    format.setInteger(MediaFormat.KEY_HEIGHT, height);
    try {
      switch (codecName) {
        case ON2_VP8:
          format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
          codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
          break;
        case GOOGLE_VPX:
          // SW VP8 decoder.
          codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
          break;
        case EXYNOX_VP8:
          // Nexus 10 HW VP8 decoder.
          codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
          break;
        default:
          return false;
      }
    } catch (Exception e) {
      // TODO(dwkang): replace this instanceof/throw with a narrower catch
      // clause once the SDK advances.
      if (e instanceof IOException) {
        Log.e(TAG, "Failed to create MediaCodec for VP8.", e);
        return false;
      }
      throw new RuntimeException(e);
    }
    Surface surface = surfaceView.getHolder().getSurface();
    MediaCrypto crypto = null;  // No crypto.
    int flags = 0;  // 0 for a decoder; MediaCodec.CONFIGURE_FLAG_ENCODE for an encoder.
    codec.configure(format, surface, crypto, flags);
    codec.start();
    codecInputBuffers = codec.getInputBuffers();
    codecOutputBuffers = codec.getOutputBuffers();
    return true;
  }

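  // Starts |looperThread| and blocks until the thread has created |handler|,
  // so that messages can be posted to it immediately afterwards.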
  private void startLooperThread() {
    looperThread = new Thread() {
        @Override
        public void run() {
          Looper.prepare();
          // Handler that is run by this thread.
          handler = new DecodeHandler();
          // Notify that the thread has created a handler.
          synchronized (MediaCodecVideoDecoder.this) {
            MediaCodecVideoDecoder.this.notify();
          }
          Looper.loop();
        }
      };
    looperThread.start();
    // Wait for the thread to notify that the Handler has been set up.
    synchronized (this) {
      try {
        wait();
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
    }
  }

  // Entry point from the native layer. Pushes an encoded frame and its render
  // time to this class.
  private void pushBuffer(ByteBuffer buffer, long renderTimeMs) {
    // TODO(dwkang): figure out why exceptions just make this thread finish.
    try {
      final long renderTimeUs = renderTimeMs * 1000;
      synchronized (frameQueue) {
        frameQueue.add(new Frame(buffer, renderTimeUs));
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

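  // Returns true if at least one frame from the native layer is waiting to be
  // decoded.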
  private boolean hasFrame() {
    synchronized (frameQueue) {
      return !frameQueue.isEmpty();
    }
  }

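  // Removes and returns the oldest pending frame; the queue must be
  // non-empty.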
  private Frame dequeueFrame() {
    synchronized (frameQueue) {
      return frameQueue.removeFirst();
    }
  }

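  // Discards all locally tracked buffer indices and flushes the codec; the
  // indices are cleared first because MediaCodec.flush() invalidates any
  // previously dequeued buffers.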
  private void flush() {
    availableInputBufferIndices.clear();
    availableOutputBufferIndices.clear();
    availableOutputBufferInfos.clear();

    codec.flush();
  }

  // Maps a media timestamp to system time. |deltaTimeUs|, the offset between
  // the two clocks, is computed lazily from the first timestamp seen after
  // start().
  private long mediaTimeToSystemTime(long mediaTimeUs) {
    if (deltaTimeUs == -1) {
      long nowUs = System.currentTimeMillis() * 1000;
      deltaTimeUs = nowUs - mediaTimeUs;
    }
    return deltaTimeUs + mediaTimeUs;
  }

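  // Runs one decode iteration on the decode thread: collects any newly
  // available MediaCodec input/output buffers, feeds queued frames into the
  // codec and releases decoded frames to the surface.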
  private void decodePendingBuffers() {
    int timeoutUs = 0;  // Don't block on dequeuing input buffer.

    int index = codec.dequeueInputBuffer(timeoutUs);
    if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
      availableInputBufferIndices.add(index);
    }
    while (feedInputBuffer()) {}

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    index = codec.dequeueOutputBuffer(info, timeoutUs);
    if (index >= 0) {
      availableOutputBufferIndices.add(index);
      availableOutputBufferInfos.add(info);
    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      codecOutputBuffers = codec.getOutputBuffers();
    }

    while (drainOutputBuffer()) {}
  }

  // Returns true if MediaCodec is ready for more data and there was data
  // available from the native layer.
  private boolean feedInputBuffer() {
    if (availableInputBufferIndices.isEmpty()) {
      return false;
    }
    if (!hasFrame()) {
      return false;
    }
    Frame frame = dequeueFrame();
    ByteBuffer buffer = frame.buffer;

    int index = availableInputBufferIndices.pollFirst();
    ByteBuffer codecData = codecInputBuffers[index];
    check(codecData.capacity() >= buffer.capacity(),
        "Buffer is too small to copy a frame.");
    buffer.rewind();
    codecData.rewind();
    codecData.put(buffer);

    try {
      int offset = 0;
      int flags = 0;
      codec.queueInputBuffer(index, offset, buffer.capacity(),
          frame.timestampUs, flags);
    } catch (MediaCodec.CryptoException e) {
      check(false, "CryptoException w/ errorCode " + e.getErrorCode() +
          ", '" + e.getMessage() + "'");
    }
    return true;
  }

  // Returns true if an output buffer was drained; in that case MediaCodec may
  // have more data to deliver.
  private boolean drainOutputBuffer() {
    if (availableOutputBufferIndices.isEmpty()) {
      return false;
    }

    int index = availableOutputBufferIndices.peekFirst();
    MediaCodec.BufferInfo info = availableOutputBufferInfos.peekFirst();
    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
      // End of stream is unexpected with streamed video.
      check(false, "Saw output end of stream.");
      return false;
    }
    long realTimeUs = mediaTimeToSystemTime(info.presentationTimeUs);
    long nowUs = System.currentTimeMillis() * 1000;
    long lateUs = nowUs - realTimeUs;
    if (lateUs < -10000) {
      // The frame should not be presented yet; it is more than 10 ms early.
      return false;
    }

    // TODO(dwkang): For some extreme cases, just not doing rendering is not
    // enough. Need to seek to the next key frame.
    boolean render = lateUs <= 30000;
    if (!render) {
      Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
    }
    // Release the buffer back to MediaCodec, rendering it to the surface
    // unless it is too late.
    codec.releaseOutputBuffer(index, render);
    availableOutputBufferIndices.removeFirst();
    availableOutputBufferInfos.removeFirst();
    return true;
  }
}