android_video_decode_accelerator.cc revision 9ab5563a3196760eb381d102cbb2bc0f7abc6a50
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/android_video_decode_accelerator.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "content/common/gpu/gpu_channel.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/limits.h"
#include "media/video/picture.h"
#include "ui/gl/android/scoped_java_surface.h"
#include "ui/gl/gl_bindings.h"

namespace content {

// Helper macro for dealing with failure.  If |result| evaluates false, emit
// |log| to ERROR, register |error| with the decoder, and return.
#define RETURN_ON_FAILURE(result, log, error)                       \
  do {                                                              \
    if (!(result)) {                                                \
      DLOG(ERROR) << log;                                           \
      base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \
          &AndroidVideoDecodeAccelerator::NotifyError,              \
          base::AsWeakPtr(this), error));                           \
      state_ = ERROR;                                               \
      return;                                                       \
    }                                                               \
  } while (0)
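
// Illustrative use of the macro above; this mirrors an actual call made in
// SendCurrentSurfaceToClient() further down and is not an additional check:
//
//   RETURN_ON_FAILURE(make_context_current_.Run(),
//                     "Failed to make this decoder's GL context current.",
//                     PLATFORM_FAILURE);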

// TODO(dwkang): We only need kMaxVideoFrames to pass the media stack's
// prerolling phase, but 1 is added due to crbug.com/176036. This should be
// tuned when we have an actual use case.
enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };

// Max number of bitstreams notified to the client with
// NotifyEndOfBitstreamBuffer() before getting output from the bitstream.
enum { kMaxBitstreamsNotifiedInAdvance = 32 };

// static
const base::TimeDelta AndroidVideoDecodeAccelerator::kDecodePollDelay =
    base::TimeDelta::FromMilliseconds(10);

AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
    media::VideoDecodeAccelerator::Client* client,
    const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
    const base::Callback<bool(void)>& make_context_current)
    : client_(client),
      make_context_current_(make_context_current),
      codec_(media::kCodecH264),
      state_(NO_ERROR),
      surface_texture_id_(0),
      picturebuffers_requested_(false),
      io_task_is_posted_(false),
      decoder_met_eos_(false),
      num_bytes_used_in_the_pending_buffer_(0),
      gl_decoder_(decoder) {
}

AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

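// Sets up the decoder: verifies that MediaCodec is available and that the
// profile is supported (currently VP8 only), creates the external GL texture
// and the SurfaceTextureBridge that MediaCodec renders into, and configures
// the codec. NotifyInitializeDone() is posted to the message loop rather than
// invoked directly.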
bool AndroidVideoDecodeAccelerator::Initialize(
    media::VideoCodecProfile profile) {
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!media::MediaCodecBridge::IsAvailable())
    return false;

  if (profile == media::VP8PROFILE_MAIN) {
    codec_ = media::kCodecVP8;
  } else {
    // TODO(dwkang): enable H264 once b/8125974 is fixed.
    LOG(ERROR) << "Unsupported profile: " << profile;
    return false;
  }

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "Failed to make this decoder's GL context current.";
    return false;
  }

  if (!gl_decoder_) {
    LOG(ERROR) << "Failed to get gles2 decoder instance.";
    return false;
  }
  glGenTextures(1, &surface_texture_id_);
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);

  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  gl_decoder_->RestoreTextureUnitBindings(0);
  gl_decoder_->RestoreActiveTexture();

  surface_texture_ = new gfx::SurfaceTextureBridge(surface_texture_id_);

  if (!ConfigureMediaCodec()) {
    LOG(ERROR) << "Failed to create MediaCodec instance.";
    return false;
  }

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyInitializeDone,
      base::AsWeakPtr(this)));
  return true;
}

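// One polling step of the decode loop: drain any available output from
// MediaCodec, feed it more input, and re-post this task after kDecodePollDelay
// while there are still pending bitstream buffers or free picture buffers.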
void AndroidVideoDecodeAccelerator::DoIOTask() {
  io_task_is_posted_ = false;
  if (state_ == ERROR) {
    return;
  }

  DequeueOutput();
  QueueInput();

  if (!pending_bitstream_buffers_.empty() ||
      !free_picture_ids_.empty()) {
    io_task_is_posted_ = true;
    // TODO(dwkang): PostDelayedTask() does not guarantee that the task will
    //               run at the exact time. Need a better way for polling.
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(
            &AndroidVideoDecodeAccelerator::DoIOTask, base::AsWeakPtr(this)),
            kDecodePollDelay);
  }
}

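// Feeds the bitstream buffer at the head of |pending_bitstream_buffers_| into
// a MediaCodec input buffer. A buffer with id -1 is the flush marker queued by
// Flush() and is translated into QueueEOS(). MediaCodec may accept only part
// of the data, so |num_bytes_used_in_the_pending_buffer_| tracks how much of
// the current buffer has been consumed across calls.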
void AndroidVideoDecodeAccelerator::QueueInput() {
  if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
    return;
  if (pending_bitstream_buffers_.empty())
    return;

  int input_buf_index = media_codec_->DequeueInputBuffer(
      media::MediaCodecBridge::kTimeOutNoWait);
  if (input_buf_index < 0) {
    DCHECK_EQ(input_buf_index, media::MediaCodecBridge::INFO_TRY_AGAIN_LATER);
    return;
  }
  media::BitstreamBuffer& bitstream_buffer =
      pending_bitstream_buffers_.front();

  if (bitstream_buffer.id() == -1) {
    media_codec_->QueueEOS(input_buf_index);
    pending_bitstream_buffers_.pop();
    return;
  }
  // Abuse the presentation time argument to propagate the bitstream
  // buffer ID to the output, so we can report it back to the client in
  // PictureReady().
  base::TimeDelta timestamp =
      base::TimeDelta::FromMicroseconds(bitstream_buffer.id());

  int bytes_written = 0;
  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), true));

  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to SharedMemory::Map()",
                    UNREADABLE_INPUT);

  const size_t offset = num_bytes_used_in_the_pending_buffer_;
  bytes_written = media_codec_->QueueInputBuffer(
          input_buf_index,
          static_cast<const uint8*>(shm->memory()) + offset,
          bitstream_buffer.size() - offset, timestamp);
  num_bytes_used_in_the_pending_buffer_ += bytes_written;
  CHECK_LE(num_bytes_used_in_the_pending_buffer_, bitstream_buffer.size());

  if (num_bytes_used_in_the_pending_buffer_ == bitstream_buffer.size()) {
    num_bytes_used_in_the_pending_buffer_ = 0;
    pending_bitstream_buffers_.pop();

    // Ideally we would call NotifyEndOfBitstreamBuffer() only when no more
    // decoded output will be returned from this bitstream buffer, but the
    // MediaCodec API does not give us enough information to guarantee that.
    // So we call NotifyEndOfBitstreamBuffer() in advance here in order to
    // keep getting more bitstreams from the client, and throttle them by
    // using |bitstreams_notified_in_advance_|.
    // TODO(dwkang): check if there is a way to remove this workaround.
    base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
        base::AsWeakPtr(this), bitstream_buffer.id()));
    bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
  }
}

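// Drains decoded output from MediaCodec. INFO_OUTPUT_FORMAT_CHANGED triggers
// the one-time RequestPictureBuffers() round trip with the client; a regular
// output buffer is rendered to the SurfaceTexture and copied into a client
// picture buffer via SendCurrentSurfaceToClient(); EOS completes a Flush().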
void AndroidVideoDecodeAccelerator::DequeueOutput() {
  if (picturebuffers_requested_ && output_picture_buffers_.empty())
    return;

  if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
    // Don't have any picture buffer to send. Need to wait more.
    return;
  }

  bool eos = false;
  base::TimeDelta timestamp;
  int32 buf_index = 0;
  do {
    size_t offset = 0;
    size_t size = 0;
    buf_index = media_codec_->DequeueOutputBuffer(
        media::MediaCodecBridge::kTimeOutNoWait,
        &offset, &size, &timestamp, &eos);
    switch (buf_index) {
      case media::MediaCodecBridge::INFO_TRY_AGAIN_LATER:
        return;

      case media::MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: {
        int32 width, height;
        media_codec_->GetOutputFormat(&width, &height);

        if (!picturebuffers_requested_) {
          picturebuffers_requested_ = true;
          size_ = gfx::Size(width, height);
          base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
              &AndroidVideoDecodeAccelerator::RequestPictureBuffers,
              base::AsWeakPtr(this)));
        } else {
          // TODO(dwkang): support dynamic resolution changes.
          // Currently, we assume that there is no resolution change in the
          // input stream, so INFO_OUTPUT_FORMAT_CHANGED should not happen
          // more than once. However, we allow it if the resolution is the
          // same as before, because |media_codec_| can be reset in Reset().
          RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
                            "Dynamic resolution change is not supported.",
                            PLATFORM_FAILURE);
        }
        return;
      }

      case media::MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED:
        media_codec_->GetOutputBuffers();
        break;
    }
  } while (buf_index < 0);

  media_codec_->ReleaseOutputBuffer(buf_index, true);

  if (eos) {
    base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &AndroidVideoDecodeAccelerator::NotifyFlushDone,
        base::AsWeakPtr(this)));
    decoder_met_eos_ = true;
  } else {
    int64 bitstream_buffer_id = timestamp.InMicroseconds();
    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));

    // Remove IDs up to and including |bitstream_buffer_id| from the list.
    // Note that |bitstreams_notified_in_advance_| does not exactly track the
    // bitstream IDs still in the decoder, because of frame reordering; we
    // only maintain it roughly, for throttling purposes.
    std::list<int32>::iterator it;
    for (it = bitstreams_notified_in_advance_.begin();
        it != bitstreams_notified_in_advance_.end();
        ++it) {
      if (*it == bitstream_buffer_id) {
        bitstreams_notified_in_advance_.erase(
            bitstreams_notified_in_advance_.begin(), ++it);
        break;
      }
    }
  }
}

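// Copies the frame most recently rendered into |surface_texture_| to the
// texture of a free client picture buffer, then posts PictureReady() with the
// originating bitstream id (recovered from the MediaCodec timestamp).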
void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
    int32 bitstream_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_NE(bitstream_id, -1);
  DCHECK(!free_picture_ids_.empty());

  RETURN_ON_FAILURE(make_context_current_.Run(),
                    "Failed to make this decoder's GL context current.",
                    PLATFORM_FAILURE);

  int32 picture_buffer_id = free_picture_ids_.front();
  free_picture_ids_.pop();

  float transform_matrix[16];
  surface_texture_->UpdateTexImage();
  surface_texture_->GetTransformMatrix(transform_matrix);

  OutputBufferMap::const_iterator i =
      output_picture_buffers_.find(picture_buffer_id);
  RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
                    "Can't find a PictureBuffer for " << picture_buffer_id,
                    PLATFORM_FAILURE);
  uint32 picture_buffer_texture_id = i->second.texture_id();

  RETURN_ON_FAILURE(gl_decoder_.get(),
                    "Failed to get gles2 decoder instance.",
                    ILLEGAL_STATE);
  // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
  // needed, because it takes tens of milliseconds to initialize.
  if (!copier_) {
    copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
    copier_->Initialize(gl_decoder_.get());
  }

  // Here we copy |surface_texture_id_| into the picture buffer instead of
  // attaching a new texture to |surface_texture_| via attachToGLContext(),
  // because:
  // 1. Once we call detachFromGLContext(), it deletes the texture that was
  //    previously attached.
  // 2. SurfaceTexture requires us to apply a transform matrix when we show
  //    the texture.
  copier_->DoCopyTexture(gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES,
                         GL_TEXTURE_2D, surface_texture_id_,
                         picture_buffer_texture_id, 0, size_.width(),
                         size_.height(), false, false, false);

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyPictureReady,
      base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id)));
}

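// Queues a bitstream buffer from the client. An empty buffer (other than the
// id == -1 flush marker) is acknowledged immediately; everything else is
// pushed onto |pending_bitstream_buffers_| and consumed by the IO task.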
void AndroidVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
    base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
        base::AsWeakPtr(this), bitstream_buffer.id()));
    return;
  }

  pending_bitstream_buffers_.push(bitstream_buffer);

  if (!io_task_is_posted_)
    DoIOTask();
}

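// Called by the client in response to ProvidePictureBuffers(). Every buffer
// must match |size_|, and all of them start out on the free list.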
void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(output_picture_buffers_.empty());

  for (size_t i = 0; i < buffers.size(); ++i) {
    RETURN_ON_FAILURE(buffers[i].size() == size_,
                      "Invalid picture buffer size was passed.",
                      INVALID_ARGUMENT);
    output_picture_buffers_.insert(std::make_pair(buffers[i].id(), buffers[i]));
    free_picture_ids_.push(buffers[i].id());
  }

  RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers,
                    "Invalid picture buffers were passed.",
                    INVALID_ARGUMENT);

  if (!io_task_is_posted_)
    DoIOTask();
}

void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  free_picture_ids_.push(picture_buffer_id);

  if (!io_task_is_posted_)
    DoIOTask();
}

void AndroidVideoDecodeAccelerator::Flush() {
  DCHECK(thread_checker_.CalledOnValidThread());

  Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
}

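// (Re)creates the MediaCodec instance and starts it against the surface
// backed by |surface_texture_|. Used from Initialize() and again from Reset()
// after EOS has been reached.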
bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
  DCHECK(surface_texture_.get());
  media_codec_.reset(media::VideoCodecBridge::Create(codec_));

  if (!media_codec_)
    return false;

  gfx::ScopedJavaSurface surface(surface_texture_.get());
  // VDA does not pass the container-indicated resolution during the
  // initialization phase, so we default to 720p here.
  // TODO(dwkang): find out a way to remove the following hard-coded value.
  media_codec_->Start(
      codec_, gfx::Size(1280, 720), surface.j_surface().obj(), NULL);
  media_codec_->GetOutputBuffers();
  return true;
}

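// Returns all queued-but-undecoded bitstream buffers to the client, then
// either resets the codec or, if it has already reached EOS, recreates it
// (see the comment about b/8125974 below).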
void AndroidVideoDecodeAccelerator::Reset() {
  DCHECK(thread_checker_.CalledOnValidThread());

  while (!pending_bitstream_buffers_.empty()) {
    media::BitstreamBuffer& bitstream_buffer =
        pending_bitstream_buffers_.front();
    pending_bitstream_buffers_.pop();

    if (bitstream_buffer.id() != -1) {
      base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
          &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
          base::AsWeakPtr(this), bitstream_buffer.id()));
    }
  }
  bitstreams_notified_in_advance_.clear();

  if (!decoder_met_eos_) {
    media_codec_->Reset();
  } else {
    // MediaCodec should be usable after reaching EOS, but it is not on some
    // devices (b/8125974). To avoid that case, we recreate a new instance.
    media_codec_->Stop();
    ConfigureMediaCodec();
  }
  decoder_met_eos_ = false;
  num_bytes_used_in_the_pending_buffer_ = 0;
  state_ = NO_ERROR;

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this)));
}

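// Stops the codec, deletes the GL texture backing the SurfaceTexture,
// destroys the copier, and finally deletes |this|; the object must not be
// used after this call.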
void AndroidVideoDecodeAccelerator::Destroy() {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (media_codec_)
    media_codec_->Stop();
  if (surface_texture_id_)
    glDeleteTextures(1, &surface_texture_id_);
  if (copier_)
    copier_->Destroy();
  delete this;
}

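// Thin wrappers around the |client_| callbacks. They are the targets of the
// PostTask calls above and are bound with base::AsWeakPtr(this), so a posted
// notification is dropped if the decoder has already been destroyed.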
void AndroidVideoDecodeAccelerator::NotifyInitializeDone() {
  client_->NotifyInitializeDone();
}

void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
}

void AndroidVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  client_->PictureReady(picture);
}

void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
    int input_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}

void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
  client_->NotifyFlushDone();
}

void AndroidVideoDecodeAccelerator::NotifyResetDone() {
  client_->NotifyResetDone();
}

void AndroidVideoDecodeAccelerator::NotifyError(
    media::VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
}

}  // namespace content