1// Copyright 2014 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include <dlfcn.h>
6#include <errno.h>
7#include <fcntl.h>
8#include <linux/videodev2.h>
9#include <poll.h>
10#include <sys/eventfd.h>
11#include <sys/ioctl.h>
12#include <sys/mman.h>
13
14#include "base/bind.h"
15#include "base/command_line.h"
16#include "base/debug/trace_event.h"
17#include "base/memory/shared_memory.h"
18#include "base/message_loop/message_loop.h"
19#include "base/message_loop/message_loop_proxy.h"
20#include "base/numerics/safe_conversions.h"
21#include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
22#include "media/base/media_switches.h"
23#include "media/filters/h264_parser.h"
24#include "ui/gl/scoped_binders.h"
25
26#define NOTIFY_ERROR(x)                            \
27  do {                                             \
28    SetDecoderState(kError);                       \
29    DLOG(ERROR) << "calling NotifyError(): " << x; \
30    NotifyError(x);                                \
31  } while (0)
32
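// The IOCTL_OR_ERROR_* helpers below wrap device_->Ioctl(); on failure they
// log the failing request, put the decoder into the error state via
// NOTIFY_ERROR(PLATFORM_FAILURE), and return from the calling function.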
33#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value)              \
34  do {                                                             \
35    if (device_->Ioctl(type, arg) != 0) {                          \
36      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
37      NOTIFY_ERROR(PLATFORM_FAILURE);                              \
38      return value;                                                \
39    }                                                              \
40  } while (0)
41
42#define IOCTL_OR_ERROR_RETURN(type, arg) \
43  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))
44
45#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
46  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)
47
48#define IOCTL_OR_LOG_ERROR(type, arg)                              \
49  do {                                                             \
50    if (device_->Ioctl(type, arg) != 0)                            \
51      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
52  } while (0)
53
54namespace content {
55
56namespace {
57
58// TODO(posciak): remove once we update linux-headers.
59#ifndef V4L2_EVENT_RESOLUTION_CHANGE
60#define V4L2_EVENT_RESOLUTION_CHANGE 5
61#endif
62
63}  // anonymous namespace
64
65struct V4L2VideoDecodeAccelerator::BitstreamBufferRef {
66  BitstreamBufferRef(
67      base::WeakPtr<Client>& client,
68      scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
69      base::SharedMemory* shm,
70      size_t size,
71      int32 input_id);
72  ~BitstreamBufferRef();
73  const base::WeakPtr<Client> client;
74  const scoped_refptr<base::MessageLoopProxy> client_message_loop_proxy;
75  const scoped_ptr<base::SharedMemory> shm;
76  const size_t size;
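  // Bytes of the bitstream consumed so far; advanced by DecodeBufferTask() as
  // the buffer is copied into device input buffers.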
77  off_t bytes_used;
78  const int32 input_id;
79};
80
81struct V4L2VideoDecodeAccelerator::EGLSyncKHRRef {
82  EGLSyncKHRRef(EGLDisplay egl_display, EGLSyncKHR egl_sync);
83  ~EGLSyncKHRRef();
84  EGLDisplay const egl_display;
85  EGLSyncKHR egl_sync;
86};
87
88struct V4L2VideoDecodeAccelerator::PictureRecord {
89  PictureRecord(bool cleared, const media::Picture& picture);
90  ~PictureRecord();
91  bool cleared;  // Whether the texture is cleared and safe to render from.
92  media::Picture picture;  // The decoded picture.
93};
94
95V4L2VideoDecodeAccelerator::BitstreamBufferRef::BitstreamBufferRef(
96    base::WeakPtr<Client>& client,
97    scoped_refptr<base::MessageLoopProxy>& client_message_loop_proxy,
98    base::SharedMemory* shm, size_t size, int32 input_id)
99    : client(client),
100      client_message_loop_proxy(client_message_loop_proxy),
101      shm(shm),
102      size(size),
103      bytes_used(0),
104      input_id(input_id) {
105}
106
107V4L2VideoDecodeAccelerator::BitstreamBufferRef::~BitstreamBufferRef() {
108  if (input_id >= 0) {
109    client_message_loop_proxy->PostTask(FROM_HERE, base::Bind(
110        &Client::NotifyEndOfBitstreamBuffer, client, input_id));
111  }
112}
113
114V4L2VideoDecodeAccelerator::EGLSyncKHRRef::EGLSyncKHRRef(
115    EGLDisplay egl_display, EGLSyncKHR egl_sync)
116    : egl_display(egl_display),
117      egl_sync(egl_sync) {
118}
119
120V4L2VideoDecodeAccelerator::EGLSyncKHRRef::~EGLSyncKHRRef() {
121  // We don't check for eglDestroySyncKHR failures, because if we get here
122  // with a valid sync object, something went wrong and we are getting
123  // destroyed anyway.
124  if (egl_sync != EGL_NO_SYNC_KHR)
125    eglDestroySyncKHR(egl_display, egl_sync);
126}
127
128V4L2VideoDecodeAccelerator::InputRecord::InputRecord()
129    : at_device(false),
130      address(NULL),
131      length(0),
132      bytes_used(0),
133      input_id(-1) {
134}
135
136V4L2VideoDecodeAccelerator::InputRecord::~InputRecord() {
137}
138
139V4L2VideoDecodeAccelerator::OutputRecord::OutputRecord()
140    : at_device(false),
141      at_client(false),
142      egl_image(EGL_NO_IMAGE_KHR),
143      egl_sync(EGL_NO_SYNC_KHR),
144      picture_id(-1),
145      cleared(false) {
146}
147
148V4L2VideoDecodeAccelerator::OutputRecord::~OutputRecord() {}
149
150V4L2VideoDecodeAccelerator::PictureRecord::PictureRecord(
151    bool cleared,
152    const media::Picture& picture)
153    : cleared(cleared), picture(picture) {}
154
155V4L2VideoDecodeAccelerator::PictureRecord::~PictureRecord() {}
156
157V4L2VideoDecodeAccelerator::V4L2VideoDecodeAccelerator(
158    EGLDisplay egl_display,
159    EGLContext egl_context,
160    const base::WeakPtr<Client>& io_client,
161    const base::Callback<bool(void)>& make_context_current,
162    scoped_ptr<V4L2Device> device,
163    const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy)
164    : child_message_loop_proxy_(base::MessageLoopProxy::current()),
165      io_message_loop_proxy_(io_message_loop_proxy),
166      io_client_(io_client),
167      decoder_thread_("V4L2DecoderThread"),
168      decoder_state_(kUninitialized),
169      device_(device.Pass()),
170      decoder_delay_bitstream_buffer_id_(-1),
171      decoder_current_input_buffer_(-1),
172      decoder_decode_buffer_tasks_scheduled_(0),
173      decoder_frames_at_client_(0),
174      decoder_flushing_(false),
175      resolution_change_pending_(false),
176      resolution_change_reset_pending_(false),
177      decoder_partial_frame_pending_(false),
178      input_streamon_(false),
179      input_buffer_queued_count_(0),
180      output_streamon_(false),
181      output_buffer_queued_count_(0),
182      output_dpb_size_(0),
183      output_planes_count_(0),
184      picture_clearing_count_(0),
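      // Not signaled until the client provides picture buffers (see
      // AssignPictureBuffers()) or we are destroyed.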
185      pictures_assigned_(false, false),
186      device_poll_thread_("V4L2DevicePollThread"),
187      make_context_current_(make_context_current),
188      egl_display_(egl_display),
189      egl_context_(egl_context),
190      video_profile_(media::VIDEO_CODEC_PROFILE_UNKNOWN),
191      weak_this_factory_(this) {
192  weak_this_ = weak_this_factory_.GetWeakPtr();
193}
194
195V4L2VideoDecodeAccelerator::~V4L2VideoDecodeAccelerator() {
196  DCHECK(!decoder_thread_.IsRunning());
197  DCHECK(!device_poll_thread_.IsRunning());
198
199  DestroyInputBuffers();
200  DestroyOutputBuffers();
201
202  // These maps have members that should be manually destroyed, e.g. file
203  // descriptors, mmap() segments, etc.
204  DCHECK(input_buffer_map_.empty());
205  DCHECK(output_buffer_map_.empty());
206}
207
208bool V4L2VideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
209                                            Client* client) {
210  DVLOG(3) << "Initialize()";
211  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
212  DCHECK_EQ(decoder_state_, kUninitialized);
213
214  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
215  client_ = client_ptr_factory_->GetWeakPtr();
216
217  switch (profile) {
218    case media::H264PROFILE_BASELINE:
219      DVLOG(2) << "Initialize(): profile H264PROFILE_BASELINE";
220      break;
221    case media::H264PROFILE_MAIN:
222      DVLOG(2) << "Initialize(): profile H264PROFILE_MAIN";
223      break;
224    case media::H264PROFILE_HIGH:
225      DVLOG(2) << "Initialize(): profile H264PROFILE_HIGH";
226      break;
227    case media::VP8PROFILE_ANY:
228      DVLOG(2) << "Initialize(): profile VP8PROFILE_ANY";
229      break;
230    default:
231      DLOG(ERROR) << "Initialize(): unsupported profile=" << profile;
232      return false;
  }
234  video_profile_ = profile;
235
236  if (egl_display_ == EGL_NO_DISPLAY) {
237    DLOG(ERROR) << "Initialize(): could not get EGLDisplay";
238    NOTIFY_ERROR(PLATFORM_FAILURE);
239    return false;
240  }
241
242  // We need the context to be initialized to query extensions.
243  if (!make_context_current_.Run()) {
244    DLOG(ERROR) << "Initialize(): could not make context current";
245    NOTIFY_ERROR(PLATFORM_FAILURE);
246    return false;
247  }
248
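  // EGL fence syncs are used by ReusePictureBuffer() to make sure the client
  // is done reading a texture before its buffer is queued back to the device
  // (see EnqueueOutputRecord()).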
249  if (!gfx::g_driver_egl.ext.b_EGL_KHR_fence_sync) {
250    DLOG(ERROR) << "Initialize(): context does not have EGL_KHR_fence_sync";
251    NOTIFY_ERROR(PLATFORM_FAILURE);
252    return false;
253  }
254
255  // Capabilities check.
256  struct v4l2_capability caps;
257  const __u32 kCapsRequired =
258      V4L2_CAP_VIDEO_CAPTURE_MPLANE |
259      V4L2_CAP_VIDEO_OUTPUT_MPLANE |
260      V4L2_CAP_STREAMING;
261  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
262  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
263    DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP"
264        ", caps check failed: 0x" << std::hex << caps.capabilities;
265    NOTIFY_ERROR(PLATFORM_FAILURE);
266    return false;
267  }
268
269  if (!CreateInputBuffers())
270    return false;
271
  // Output format has to be set up before streaming starts.
273  struct v4l2_format format;
274  memset(&format, 0, sizeof(format));
275  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
276  uint32 output_format_fourcc = device_->PreferredOutputFormat();
277  if (output_format_fourcc == 0) {
278    // TODO(posciak): We should enumerate available output formats, as well as
279    // take into account formats that the client is ready to accept.
280    return false;
281  }
282  format.fmt.pix_mp.pixelformat = output_format_fourcc;
283  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
284
285  // Subscribe to the resolution change event.
286  struct v4l2_event_subscription sub;
287  memset(&sub, 0, sizeof(sub));
288  sub.type = V4L2_EVENT_RESOLUTION_CHANGE;
289  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_SUBSCRIBE_EVENT, &sub);
290
291  // Initialize format-specific bits.
292  if (video_profile_ >= media::H264PROFILE_MIN &&
293      video_profile_ <= media::H264PROFILE_MAX) {
294    decoder_h264_parser_.reset(new media::H264Parser());
295  }
296
297  if (!decoder_thread_.Start()) {
298    DLOG(ERROR) << "Initialize(): decoder thread failed to start";
299    NOTIFY_ERROR(PLATFORM_FAILURE);
300    return false;
301  }
302
303  // StartDevicePoll will NOTIFY_ERROR on failure, so IgnoreResult is fine here.
304  decoder_thread_.message_loop()->PostTask(
305      FROM_HERE,
306      base::Bind(
307          base::IgnoreResult(&V4L2VideoDecodeAccelerator::StartDevicePoll),
308          base::Unretained(this)));
309
310  SetDecoderState(kInitialized);
311  return true;
312}
313
314void V4L2VideoDecodeAccelerator::Decode(
315    const media::BitstreamBuffer& bitstream_buffer) {
316  DVLOG(1) << "Decode(): input_id=" << bitstream_buffer.id()
317           << ", size=" << bitstream_buffer.size();
318  DCHECK(io_message_loop_proxy_->BelongsToCurrentThread());
319
320  // DecodeTask() will take care of running a DecodeBufferTask().
321  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
322      &V4L2VideoDecodeAccelerator::DecodeTask, base::Unretained(this),
323      bitstream_buffer));
324}
325
326void V4L2VideoDecodeAccelerator::AssignPictureBuffers(
327    const std::vector<media::PictureBuffer>& buffers) {
328  DVLOG(3) << "AssignPictureBuffers(): buffer_count=" << buffers.size();
329  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
330
331  if (buffers.size() != output_buffer_map_.size()) {
332    DLOG(ERROR) << "AssignPictureBuffers(): Failed to provide requested picture"
333                   " buffers. (Got " << buffers.size()
334                << ", requested " << output_buffer_map_.size() << ")";
335    NOTIFY_ERROR(INVALID_ARGUMENT);
336    return;
337  }
338
339  if (!make_context_current_.Run()) {
340    DLOG(ERROR) << "AssignPictureBuffers(): could not make context current";
341    NOTIFY_ERROR(PLATFORM_FAILURE);
342    return;
343  }
344
345  gfx::ScopedTextureBinder bind_restore(GL_TEXTURE_EXTERNAL_OES, 0);
346
347  // It's safe to manipulate all the buffer state here, because the decoder
348  // thread is waiting on pictures_assigned_.
349  DCHECK(free_output_buffers_.empty());
350  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
351    DCHECK(buffers[i].size() == frame_buffer_size_);
352
353    OutputRecord& output_record = output_buffer_map_[i];
354    DCHECK(!output_record.at_device);
355    DCHECK(!output_record.at_client);
356    DCHECK_EQ(output_record.egl_image, EGL_NO_IMAGE_KHR);
357    DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
358    DCHECK_EQ(output_record.picture_id, -1);
359    DCHECK_EQ(output_record.cleared, false);
360
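    // Create an EGLImage backed by the device's output buffer |i| and attach
    // it to the client-provided texture via V4L2Device::CreateEGLImage().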
361    EGLImageKHR egl_image = device_->CreateEGLImage(egl_display_,
362                                                    egl_context_,
363                                                    buffers[i].texture_id(),
364                                                    frame_buffer_size_,
365                                                    i,
366                                                    output_planes_count_);
367    if (egl_image == EGL_NO_IMAGE_KHR) {
368      DLOG(ERROR) << "AssignPictureBuffers(): could not create EGLImageKHR";
369      // Ownership of EGLImages allocated in previous iterations of this loop
    // has been transferred to output_buffer_map_. After we error out here
371      // the destructor will handle their cleanup.
372      NOTIFY_ERROR(PLATFORM_FAILURE);
373      return;
374    }
375
376    output_record.egl_image = egl_image;
377    output_record.picture_id = buffers[i].id();
378    free_output_buffers_.push(i);
379    DVLOG(3) << "AssignPictureBuffers(): buffer[" << i
380             << "]: picture_id=" << output_record.picture_id;
381  }
382
383  pictures_assigned_.Signal();
384}
385
386void V4L2VideoDecodeAccelerator::ReusePictureBuffer(int32 picture_buffer_id) {
387  DVLOG(3) << "ReusePictureBuffer(): picture_buffer_id=" << picture_buffer_id;
388  // Must be run on child thread, as we'll insert a sync in the EGL context.
389  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
390
391  if (!make_context_current_.Run()) {
392    DLOG(ERROR) << "ReusePictureBuffer(): could not make context current";
393    NOTIFY_ERROR(PLATFORM_FAILURE);
394    return;
395  }
396
397  EGLSyncKHR egl_sync =
398      eglCreateSyncKHR(egl_display_, EGL_SYNC_FENCE_KHR, NULL);
399  if (egl_sync == EGL_NO_SYNC_KHR) {
400    DLOG(ERROR) << "ReusePictureBuffer(): eglCreateSyncKHR() failed";
401    NOTIFY_ERROR(PLATFORM_FAILURE);
402    return;
403  }
404
405  scoped_ptr<EGLSyncKHRRef> egl_sync_ref(new EGLSyncKHRRef(
406      egl_display_, egl_sync));
407  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
408      &V4L2VideoDecodeAccelerator::ReusePictureBufferTask,
409      base::Unretained(this), picture_buffer_id, base::Passed(&egl_sync_ref)));
410}
411
412void V4L2VideoDecodeAccelerator::Flush() {
413  DVLOG(3) << "Flush()";
414  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
415  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
416      &V4L2VideoDecodeAccelerator::FlushTask, base::Unretained(this)));
417}
418
419void V4L2VideoDecodeAccelerator::Reset() {
420  DVLOG(3) << "Reset()";
421  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
422  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
423      &V4L2VideoDecodeAccelerator::ResetTask, base::Unretained(this)));
424}
425
426void V4L2VideoDecodeAccelerator::Destroy() {
427  DVLOG(3) << "Destroy()";
428  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
429
430  // We're destroying; cancel all callbacks.
431  client_ptr_factory_.reset();
432  weak_this_factory_.InvalidateWeakPtrs();
433
  // If the decoder thread is running, destroy using a posted task.
435  if (decoder_thread_.IsRunning()) {
436    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
437        &V4L2VideoDecodeAccelerator::DestroyTask, base::Unretained(this)));
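    // Wake the decoder thread in case it is blocked waiting for picture
    // buffers to be assigned, so that DestroyTask() can run.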
438    pictures_assigned_.Signal();
439    // DestroyTask() will cause the decoder_thread_ to flush all tasks.
440    decoder_thread_.Stop();
441  } else {
442    // Otherwise, call the destroy task directly.
443    DestroyTask();
444  }
445
446  // Set to kError state just in case.
447  SetDecoderState(kError);
448
449  delete this;
450}
451
452bool V4L2VideoDecodeAccelerator::CanDecodeOnIOThread() { return true; }
453
454void V4L2VideoDecodeAccelerator::DecodeTask(
455    const media::BitstreamBuffer& bitstream_buffer) {
456  DVLOG(3) << "DecodeTask(): input_id=" << bitstream_buffer.id();
457  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
458  DCHECK_NE(decoder_state_, kUninitialized);
459  TRACE_EVENT1("Video Decoder", "V4L2VDA::DecodeTask", "input_id",
460               bitstream_buffer.id());
461
462  scoped_ptr<BitstreamBufferRef> bitstream_record(new BitstreamBufferRef(
463      io_client_, io_message_loop_proxy_,
464      new base::SharedMemory(bitstream_buffer.handle(), true),
465      bitstream_buffer.size(), bitstream_buffer.id()));
466  if (!bitstream_record->shm->Map(bitstream_buffer.size())) {
467    DLOG(ERROR) << "Decode(): could not map bitstream_buffer";
468    NOTIFY_ERROR(UNREADABLE_INPUT);
469    return;
470  }
471  DVLOG(3) << "DecodeTask(): mapped at=" << bitstream_record->shm->memory();
472
473  if (decoder_state_ == kResetting || decoder_flushing_) {
474    // In the case that we're resetting or flushing, we need to delay decoding
475    // the BitstreamBuffers that come after the Reset() or Flush() call.  When
476    // we're here, we know that this DecodeTask() was scheduled by a Decode()
477    // call that came after (in the client thread) the Reset() or Flush() call;
478    // thus set up the delay if necessary.
479    if (decoder_delay_bitstream_buffer_id_ == -1)
480      decoder_delay_bitstream_buffer_id_ = bitstream_record->input_id;
481  } else if (decoder_state_ == kError) {
482    DVLOG(2) << "DecodeTask(): early out: kError state";
483    return;
484  }
485
486  decoder_input_queue_.push(
487      linked_ptr<BitstreamBufferRef>(bitstream_record.release()));
488  decoder_decode_buffer_tasks_scheduled_++;
489  DecodeBufferTask();
490}
491
492void V4L2VideoDecodeAccelerator::DecodeBufferTask() {
493  DVLOG(3) << "DecodeBufferTask()";
494  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
495  DCHECK_NE(decoder_state_, kUninitialized);
496  TRACE_EVENT0("Video Decoder", "V4L2VDA::DecodeBufferTask");
497
498  decoder_decode_buffer_tasks_scheduled_--;
499
500  if (decoder_state_ == kResetting) {
501    DVLOG(2) << "DecodeBufferTask(): early out: kResetting state";
502    return;
503  } else if (decoder_state_ == kError) {
504    DVLOG(2) << "DecodeBufferTask(): early out: kError state";
505    return;
506  } else if (decoder_state_ == kChangingResolution) {
507    DVLOG(2) << "DecodeBufferTask(): early out: resolution change pending";
508    return;
509  }
510
511  if (decoder_current_bitstream_buffer_ == NULL) {
512    if (decoder_input_queue_.empty()) {
513      // We're waiting for a new buffer -- exit without scheduling a new task.
514      return;
515    }
516    linked_ptr<BitstreamBufferRef>& buffer_ref = decoder_input_queue_.front();
517    if (decoder_delay_bitstream_buffer_id_ == buffer_ref->input_id) {
518      // We're asked to delay decoding on this and subsequent buffers.
519      return;
520    }
521
    // Set up to use the next buffer.
523    decoder_current_bitstream_buffer_.reset(buffer_ref.release());
524    decoder_input_queue_.pop();
525    DVLOG(3) << "DecodeBufferTask(): reading input_id="
526             << decoder_current_bitstream_buffer_->input_id
527             << ", addr=" << (decoder_current_bitstream_buffer_->shm ?
528                              decoder_current_bitstream_buffer_->shm->memory() :
529                              NULL)
530             << ", size=" << decoder_current_bitstream_buffer_->size;
531  }
532  bool schedule_task = false;
533  const size_t size = decoder_current_bitstream_buffer_->size;
534  size_t decoded_size = 0;
535  if (size == 0) {
536    const int32 input_id = decoder_current_bitstream_buffer_->input_id;
537    if (input_id >= 0) {
538      // This is a buffer queued from the client that has zero size.  Skip.
539      schedule_task = true;
540    } else {
541      // This is a buffer of zero size, queued to flush the pipe.  Flush.
542      DCHECK_EQ(decoder_current_bitstream_buffer_->shm.get(),
543                static_cast<base::SharedMemory*>(NULL));
544      // Enqueue a buffer guaranteed to be empty.  To do that, we flush the
545      // current input, enqueue no data to the next frame, then flush that down.
546      schedule_task = true;
547      if (decoder_current_input_buffer_ != -1 &&
548          input_buffer_map_[decoder_current_input_buffer_].input_id !=
549              kFlushBufferId)
550        schedule_task = FlushInputFrame();
551
552      if (schedule_task && AppendToInputFrame(NULL, 0) && FlushInputFrame()) {
553        DVLOG(2) << "DecodeBufferTask(): enqueued flush buffer";
554        decoder_partial_frame_pending_ = false;
555        schedule_task = true;
556      } else {
557        // If we failed to enqueue the empty buffer (due to pipeline
558        // backpressure), don't advance the bitstream buffer queue, and don't
559        // schedule the next task.  This bitstream buffer queue entry will get
560        // reprocessed when the pipeline frees up.
561        schedule_task = false;
562      }
563    }
564  } else {
565    // This is a buffer queued from the client, with actual contents.  Decode.
566    const uint8* const data =
567        reinterpret_cast<const uint8*>(
568            decoder_current_bitstream_buffer_->shm->memory()) +
569        decoder_current_bitstream_buffer_->bytes_used;
570    const size_t data_size =
571        decoder_current_bitstream_buffer_->size -
572        decoder_current_bitstream_buffer_->bytes_used;
573    if (!AdvanceFrameFragment(data, data_size, &decoded_size)) {
574      NOTIFY_ERROR(UNREADABLE_INPUT);
575      return;
576    }
577    // AdvanceFrameFragment should not return a size larger than the buffer
578    // size, even on invalid data.
579    CHECK_LE(decoded_size, data_size);
580
581    switch (decoder_state_) {
582      case kInitialized:
583      case kAfterReset:
584        schedule_task = DecodeBufferInitial(data, decoded_size, &decoded_size);
585        break;
586      case kDecoding:
587        schedule_task = DecodeBufferContinue(data, decoded_size);
588        break;
589      default:
590        NOTIFY_ERROR(ILLEGAL_STATE);
591        return;
592    }
593  }
594  if (decoder_state_ == kError) {
595    // Failed during decode.
596    return;
597  }
598
599  if (schedule_task) {
600    decoder_current_bitstream_buffer_->bytes_used += decoded_size;
601    if (decoder_current_bitstream_buffer_->bytes_used ==
602        decoder_current_bitstream_buffer_->size) {
603      // Our current bitstream buffer is done; return it.
604      int32 input_id = decoder_current_bitstream_buffer_->input_id;
605      DVLOG(3) << "DecodeBufferTask(): finished input_id=" << input_id;
606      // BitstreamBufferRef destructor calls NotifyEndOfBitstreamBuffer().
607      decoder_current_bitstream_buffer_.reset();
608    }
609    ScheduleDecodeBufferTaskIfNeeded();
610  }
611}
612
613bool V4L2VideoDecodeAccelerator::AdvanceFrameFragment(
614    const uint8* data,
615    size_t size,
616    size_t* endpos) {
617  if (video_profile_ >= media::H264PROFILE_MIN &&
618      video_profile_ <= media::H264PROFILE_MAX) {
619    // For H264, we need to feed HW one frame at a time.  This is going to take
620    // some parsing of our input stream.
621    decoder_h264_parser_->SetStream(data, size);
622    media::H264NALU nalu;
623    media::H264Parser::Result result;
624    *endpos = 0;
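    // |*endpos| is advanced past each NALU that belongs to the current frame;
    // when a frame boundary is found we return with it pointing at the end of
    // the last NALU of that frame.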
625
626    // Keep on peeking the next NALs while they don't indicate a frame
627    // boundary.
628    for (;;) {
629      bool end_of_frame = false;
630      result = decoder_h264_parser_->AdvanceToNextNALU(&nalu);
631      if (result == media::H264Parser::kInvalidStream ||
632          result == media::H264Parser::kUnsupportedStream)
633        return false;
634      if (result == media::H264Parser::kEOStream) {
635        // We've reached the end of the buffer before finding a frame boundary.
636        decoder_partial_frame_pending_ = true;
637        return true;
638      }
639      switch (nalu.nal_unit_type) {
640        case media::H264NALU::kNonIDRSlice:
641        case media::H264NALU::kIDRSlice:
642          if (nalu.size < 1)
643            return false;
644          // For these two, if the "first_mb_in_slice" field is zero, start a
645          // new frame and return.  This field is Exp-Golomb coded starting on
646          // the eighth data bit of the NAL; a zero value is encoded with a
647          // leading '1' bit in the byte, which we can detect as the byte being
648          // (unsigned) greater than or equal to 0x80.
649          if (nalu.data[1] >= 0x80) {
650            end_of_frame = true;
651            break;
652          }
653          break;
654        case media::H264NALU::kSEIMessage:
655        case media::H264NALU::kSPS:
656        case media::H264NALU::kPPS:
657        case media::H264NALU::kAUD:
658        case media::H264NALU::kEOSeq:
659        case media::H264NALU::kEOStream:
660        case media::H264NALU::kReserved14:
661        case media::H264NALU::kReserved15:
662        case media::H264NALU::kReserved16:
663        case media::H264NALU::kReserved17:
664        case media::H264NALU::kReserved18:
665          // These unconditionally signal a frame boundary.
666          end_of_frame = true;
667          break;
668        default:
669          // For all others, keep going.
670          break;
671      }
672      if (end_of_frame) {
673        if (!decoder_partial_frame_pending_ && *endpos == 0) {
674          // The frame was previously restarted, and we haven't filled the
675          // current frame with any contents yet.  Start the new frame here and
676          // continue parsing NALs.
677        } else {
678          // The frame wasn't previously restarted and/or we have contents for
679          // the current frame; signal the start of a new frame here: we don't
680          // have a partial frame anymore.
681          decoder_partial_frame_pending_ = false;
682          return true;
683        }
684      }
685      *endpos = (nalu.data + nalu.size) - data;
686    }
687    NOTREACHED();
688    return false;
689  } else {
690    DCHECK_GE(video_profile_, media::VP8PROFILE_MIN);
691    DCHECK_LE(video_profile_, media::VP8PROFILE_MAX);
692    // For VP8, we can just dump the entire buffer.  No fragmentation needed,
693    // and we never return a partial frame.
694    *endpos = size;
695    decoder_partial_frame_pending_ = false;
696    return true;
697  }
698}
699
700void V4L2VideoDecodeAccelerator::ScheduleDecodeBufferTaskIfNeeded() {
701  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
702
703  // If we're behind on tasks, schedule another one.
704  int buffers_to_decode = decoder_input_queue_.size();
705  if (decoder_current_bitstream_buffer_ != NULL)
706    buffers_to_decode++;
707  if (decoder_decode_buffer_tasks_scheduled_ < buffers_to_decode) {
708    decoder_decode_buffer_tasks_scheduled_++;
709    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
710        &V4L2VideoDecodeAccelerator::DecodeBufferTask,
711        base::Unretained(this)));
712  }
713}
714
715bool V4L2VideoDecodeAccelerator::DecodeBufferInitial(
716    const void* data, size_t size, size_t* endpos) {
717  DVLOG(3) << "DecodeBufferInitial(): data=" << data << ", size=" << size;
718  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
719  DCHECK_NE(decoder_state_, kUninitialized);
720  DCHECK_NE(decoder_state_, kDecoding);
721  // Initial decode.  We haven't been able to get output stream format info yet.
722  // Get it, and start decoding.
723
724  // Copy in and send to HW.
725  if (!AppendToInputFrame(data, size))
726    return false;
727
728  // If we only have a partial frame, don't flush and process yet.
729  if (decoder_partial_frame_pending_)
730    return true;
731
732  if (!FlushInputFrame())
733    return false;
734
735  // Recycle buffers.
736  Dequeue();
737
738  // Check and see if we have format info yet.
739  struct v4l2_format format;
740  bool again = false;
741  if (!GetFormatInfo(&format, &again))
742    return false;
743
744  if (again) {
745    // Need more stream to decode format, return true and schedule next buffer.
746    *endpos = size;
747    return true;
748  }
749
750  // Run this initialization only on first startup.
751  if (decoder_state_ == kInitialized) {
752    DVLOG(3) << "DecodeBufferInitial(): running initialization";
    // Success! Set up our parameters.
754    if (!CreateBuffersForFormat(format))
755      return false;
756
757    // We expect to process the initial buffer once during stream init to
    // configure stream parameters, but will not consume the stream data on that
759    // iteration.  Subsequent iterations (including after reset) do not require
760    // the stream init step.
761    *endpos = 0;
762  } else {
763    *endpos = size;
764  }
765
766  decoder_state_ = kDecoding;
767  ScheduleDecodeBufferTaskIfNeeded();
768  return true;
769}
770
771bool V4L2VideoDecodeAccelerator::DecodeBufferContinue(
772    const void* data, size_t size) {
773  DVLOG(3) << "DecodeBufferContinue(): data=" << data << ", size=" << size;
774  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
775  DCHECK_EQ(decoder_state_, kDecoding);
776
777  // Both of these calls will set kError state if they fail.
778  // Only flush the frame if it's complete.
779  return (AppendToInputFrame(data, size) &&
780          (decoder_partial_frame_pending_ || FlushInputFrame()));
781}
782
783bool V4L2VideoDecodeAccelerator::AppendToInputFrame(
784    const void* data, size_t size) {
785  DVLOG(3) << "AppendToInputFrame()";
786  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
787  DCHECK_NE(decoder_state_, kUninitialized);
788  DCHECK_NE(decoder_state_, kResetting);
789  DCHECK_NE(decoder_state_, kError);
790  // This routine can handle data == NULL and size == 0, which occurs when
791  // we queue an empty buffer for the purposes of flushing the pipe.
792
793  // Flush if we're too big
794  if (decoder_current_input_buffer_ != -1) {
795    InputRecord& input_record =
796        input_buffer_map_[decoder_current_input_buffer_];
797    if (input_record.bytes_used + size > input_record.length) {
798      if (!FlushInputFrame())
799        return false;
800      decoder_current_input_buffer_ = -1;
801    }
802  }
803
804  // Try to get an available input buffer
805  if (decoder_current_input_buffer_ == -1) {
806    if (free_input_buffers_.empty()) {
807      // See if we can get more free buffers from HW
808      Dequeue();
809      if (free_input_buffers_.empty()) {
810        // Nope!
811        DVLOG(2) << "AppendToInputFrame(): stalled for input buffers";
812        return false;
813      }
814    }
815    decoder_current_input_buffer_ = free_input_buffers_.back();
816    free_input_buffers_.pop_back();
817    InputRecord& input_record =
818        input_buffer_map_[decoder_current_input_buffer_];
819    DCHECK_EQ(input_record.bytes_used, 0);
820    DCHECK_EQ(input_record.input_id, -1);
821    DCHECK(decoder_current_bitstream_buffer_ != NULL);
822    input_record.input_id = decoder_current_bitstream_buffer_->input_id;
823  }
824
825  DCHECK(data != NULL || size == 0);
826  if (size == 0) {
827    // If we asked for an empty buffer, return now.  We return only after
828    // getting the next input buffer, since we might actually want an empty
829    // input buffer for flushing purposes.
830    return true;
831  }
832
  // Copy into the buffer.
834  InputRecord& input_record =
835      input_buffer_map_[decoder_current_input_buffer_];
836  if (size > input_record.length - input_record.bytes_used) {
837    LOG(ERROR) << "AppendToInputFrame(): over-size frame, erroring";
838    NOTIFY_ERROR(UNREADABLE_INPUT);
839    return false;
840  }
841  memcpy(
842      reinterpret_cast<uint8*>(input_record.address) + input_record.bytes_used,
843      data,
844      size);
845  input_record.bytes_used += size;
846
847  return true;
848}
849
850bool V4L2VideoDecodeAccelerator::FlushInputFrame() {
851  DVLOG(3) << "FlushInputFrame()";
852  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
853  DCHECK_NE(decoder_state_, kUninitialized);
854  DCHECK_NE(decoder_state_, kResetting);
855  DCHECK_NE(decoder_state_, kError);
856
857  if (decoder_current_input_buffer_ == -1)
858    return true;
859
860  InputRecord& input_record =
861      input_buffer_map_[decoder_current_input_buffer_];
862  DCHECK_NE(input_record.input_id, -1);
863  DCHECK(input_record.input_id != kFlushBufferId ||
864         input_record.bytes_used == 0);
865  // * if input_id >= 0, this input buffer was prompted by a bitstream buffer we
866  //   got from the client.  We can skip it if it is empty.
867  // * if input_id < 0 (should be kFlushBufferId in this case), this input
868  //   buffer was prompted by a flush buffer, and should be queued even when
869  //   empty.
870  if (input_record.input_id >= 0 && input_record.bytes_used == 0) {
871    input_record.input_id = -1;
872    free_input_buffers_.push_back(decoder_current_input_buffer_);
873    decoder_current_input_buffer_ = -1;
874    return true;
875  }
876
877  // Queue it.
878  input_ready_queue_.push(decoder_current_input_buffer_);
879  decoder_current_input_buffer_ = -1;
880  DVLOG(3) << "FlushInputFrame(): submitting input_id="
881           << input_record.input_id;
882  // Enqueue once since there's new available input for it.
883  Enqueue();
884
885  return (decoder_state_ != kError);
886}
887
888void V4L2VideoDecodeAccelerator::ServiceDeviceTask(bool event_pending) {
889  DVLOG(3) << "ServiceDeviceTask()";
890  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
891  DCHECK_NE(decoder_state_, kUninitialized);
892  TRACE_EVENT0("Video Decoder", "V4L2VDA::ServiceDeviceTask");
893
894  if (decoder_state_ == kResetting) {
895    DVLOG(2) << "ServiceDeviceTask(): early out: kResetting state";
896    return;
897  } else if (decoder_state_ == kError) {
898    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
899    return;
900  } else if (decoder_state_ == kChangingResolution) {
901    DVLOG(2) << "ServiceDeviceTask(): early out: kChangingResolution state";
902    return;
903  }
904
905  if (event_pending)
906    DequeueEvents();
907  Dequeue();
908  Enqueue();
909
910  // Clear the interrupt fd.
911  if (!device_->ClearDevicePollInterrupt()) {
912    NOTIFY_ERROR(PLATFORM_FAILURE);
913    return;
914  }
915
916  bool poll_device = false;
917  // Add fd, if we should poll on it.
918  // Can be polled as soon as either input or output buffers are queued.
919  if (input_buffer_queued_count_ + output_buffer_queued_count_ > 0)
920    poll_device = true;
921
922  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
923  // so either:
924  // * device_poll_thread_ is running normally
925  // * device_poll_thread_ scheduled us, but then a ResetTask() or DestroyTask()
926  //   shut it down, in which case we're either in kResetting or kError states
927  //   respectively, and we should have early-outed already.
928  DCHECK(device_poll_thread_.message_loop());
929  // Queue the DevicePollTask() now.
930  device_poll_thread_.message_loop()->PostTask(
931      FROM_HERE,
932      base::Bind(&V4L2VideoDecodeAccelerator::DevicePollTask,
933                 base::Unretained(this),
934                 poll_device));
935
936  DVLOG(1) << "ServiceDeviceTask(): buffer counts: DEC["
937           << decoder_input_queue_.size() << "->"
938           << input_ready_queue_.size() << "] => DEVICE["
939           << free_input_buffers_.size() << "+"
940           << input_buffer_queued_count_ << "/"
941           << input_buffer_map_.size() << "->"
942           << free_output_buffers_.size() << "+"
943           << output_buffer_queued_count_ << "/"
944           << output_buffer_map_.size() << "] => VDA["
945           << decoder_frames_at_client_ << "]";
946
947  ScheduleDecodeBufferTaskIfNeeded();
948  StartResolutionChangeIfNeeded();
949}
950
951void V4L2VideoDecodeAccelerator::Enqueue() {
952  DVLOG(3) << "Enqueue()";
953  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
954  DCHECK_NE(decoder_state_, kUninitialized);
955  TRACE_EVENT0("Video Decoder", "V4L2VDA::Enqueue");
956
  // Enqueue all the inputs we can.
958  const int old_inputs_queued = input_buffer_queued_count_;
959  while (!input_ready_queue_.empty()) {
960    if (!EnqueueInputRecord())
961      return;
962  }
963  if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
964    // We just started up a previously empty queue.
965    // Queue state changed; signal interrupt.
966    if (!device_->SetDevicePollInterrupt()) {
967      DPLOG(ERROR) << "SetDevicePollInterrupt(): failed";
968      NOTIFY_ERROR(PLATFORM_FAILURE);
969      return;
970    }
971    // Start VIDIOC_STREAMON if we haven't yet.
972    if (!input_streamon_) {
973      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
974      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
975      input_streamon_ = true;
976    }
977  }
978
979  // Enqueue all the outputs we can.
980  const int old_outputs_queued = output_buffer_queued_count_;
981  while (!free_output_buffers_.empty()) {
982    if (!EnqueueOutputRecord())
983      return;
984  }
985  if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
986    // We just started up a previously empty queue.
987    // Queue state changed; signal interrupt.
988    if (!device_->SetDevicePollInterrupt()) {
989      DPLOG(ERROR) << "SetDevicePollInterrupt(): failed";
990      NOTIFY_ERROR(PLATFORM_FAILURE);
991      return;
992    }
993    // Start VIDIOC_STREAMON if we haven't yet.
994    if (!output_streamon_) {
995      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
996      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
997      output_streamon_ = true;
998    }
999  }
1000}
1001
1002void V4L2VideoDecodeAccelerator::DequeueEvents() {
1003  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1004  DCHECK_NE(decoder_state_, kUninitialized);
1005  DVLOG(3) << "DequeueEvents()";
1006
1007  struct v4l2_event ev;
1008  memset(&ev, 0, sizeof(ev));
1009
1010  while (device_->Ioctl(VIDIOC_DQEVENT, &ev) == 0) {
1011    if (ev.type == V4L2_EVENT_RESOLUTION_CHANGE) {
1012      DVLOG(3) << "DequeueEvents(): got resolution change event.";
1013      DCHECK(!resolution_change_pending_);
1014      resolution_change_pending_ = IsResolutionChangeNecessary();
1015    } else {
1016      DLOG(FATAL) << "DequeueEvents(): got an event (" << ev.type
1017                  << ") we haven't subscribed to.";
1018    }
1019  }
1020}
1021
1022void V4L2VideoDecodeAccelerator::Dequeue() {
1023  DVLOG(3) << "Dequeue()";
1024  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1025  DCHECK_NE(decoder_state_, kUninitialized);
1026  TRACE_EVENT0("Video Decoder", "V4L2VDA::Dequeue");
1027
1028  // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
1029  // list.
1030  while (input_buffer_queued_count_ > 0) {
1031    DCHECK(input_streamon_);
1032    struct v4l2_buffer dqbuf;
1033    struct v4l2_plane planes[1];
1034    memset(&dqbuf, 0, sizeof(dqbuf));
1035    memset(planes, 0, sizeof(planes));
1036    dqbuf.type   = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1037    dqbuf.memory = V4L2_MEMORY_MMAP;
1038    dqbuf.m.planes = planes;
1039    dqbuf.length = 1;
1040    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1041      if (errno == EAGAIN) {
1042        // EAGAIN if we're just out of buffers to dequeue.
1043        break;
1044      }
1045      DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1046      NOTIFY_ERROR(PLATFORM_FAILURE);
1047      return;
1048    }
1049    InputRecord& input_record = input_buffer_map_[dqbuf.index];
1050    DCHECK(input_record.at_device);
1051    free_input_buffers_.push_back(dqbuf.index);
1052    input_record.at_device = false;
1053    input_record.bytes_used = 0;
1054    input_record.input_id = -1;
1055    input_buffer_queued_count_--;
1056  }
1057
1058  // Dequeue completed output (VIDEO_CAPTURE) buffers, and queue to the
1059  // completed queue.
1060  while (output_buffer_queued_count_ > 0) {
1061    DCHECK(output_streamon_);
1062    struct v4l2_buffer dqbuf;
1063    scoped_ptr<struct v4l2_plane[]> planes(
1064        new v4l2_plane[output_planes_count_]);
1065    memset(&dqbuf, 0, sizeof(dqbuf));
1066    memset(planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1067    dqbuf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1068    dqbuf.memory = V4L2_MEMORY_MMAP;
1069    dqbuf.m.planes = planes.get();
1070    dqbuf.length = output_planes_count_;
1071    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
1072      if (errno == EAGAIN) {
1073        // EAGAIN if we're just out of buffers to dequeue.
1074        break;
1075      }
1076      DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
1077      NOTIFY_ERROR(PLATFORM_FAILURE);
1078      return;
1079    }
1080    OutputRecord& output_record = output_buffer_map_[dqbuf.index];
1081    DCHECK(output_record.at_device);
1082    DCHECK(!output_record.at_client);
1083    DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1084    DCHECK_NE(output_record.picture_id, -1);
1085    output_record.at_device = false;
1086    if (dqbuf.m.planes[0].bytesused + dqbuf.m.planes[1].bytesused == 0) {
1087      // This is an empty output buffer returned as part of a flush.
1088      free_output_buffers_.push(dqbuf.index);
1089    } else {
1090      DCHECK_GE(dqbuf.timestamp.tv_sec, 0);
1091      output_record.at_client = true;
1092      DVLOG(3) << "Dequeue(): returning input_id=" << dqbuf.timestamp.tv_sec
1093               << " as picture_id=" << output_record.picture_id;
1094      const media::Picture& picture =
1095          media::Picture(output_record.picture_id,
1096                         dqbuf.timestamp.tv_sec,
1097                         gfx::Rect(frame_buffer_size_));
1098      pending_picture_ready_.push(
1099          PictureRecord(output_record.cleared, picture));
1100      SendPictureReady();
1101      output_record.cleared = true;
1102      decoder_frames_at_client_++;
1103    }
1104    output_buffer_queued_count_--;
1105  }
1106
1107  NotifyFlushDoneIfNeeded();
1108}
1109
1110bool V4L2VideoDecodeAccelerator::EnqueueInputRecord() {
1111  DVLOG(3) << "EnqueueInputRecord()";
1112  DCHECK(!input_ready_queue_.empty());
1113
1114  // Enqueue an input (VIDEO_OUTPUT) buffer.
1115  const int buffer = input_ready_queue_.front();
1116  InputRecord& input_record = input_buffer_map_[buffer];
1117  DCHECK(!input_record.at_device);
1118  struct v4l2_buffer qbuf;
1119  struct v4l2_plane qbuf_plane;
1120  memset(&qbuf, 0, sizeof(qbuf));
1121  memset(&qbuf_plane, 0, sizeof(qbuf_plane));
1122  qbuf.index                 = buffer;
1123  qbuf.type                  = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
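  // Stash the input_id in the OUTPUT buffer's timestamp; the driver copies
  // timestamps to the matching decoded CAPTURE buffer, which is how Dequeue()
  // maps decoded pictures back to their bitstream buffers.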
1124  qbuf.timestamp.tv_sec      = input_record.input_id;
1125  qbuf.memory                = V4L2_MEMORY_MMAP;
1126  qbuf.m.planes              = &qbuf_plane;
1127  qbuf.m.planes[0].bytesused = input_record.bytes_used;
1128  qbuf.length                = 1;
1129  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1130  input_ready_queue_.pop();
1131  input_record.at_device = true;
1132  input_buffer_queued_count_++;
1133  DVLOG(3) << "EnqueueInputRecord(): enqueued input_id="
1134           << input_record.input_id << " size="  << input_record.bytes_used;
1135  return true;
1136}
1137
1138bool V4L2VideoDecodeAccelerator::EnqueueOutputRecord() {
1139  DVLOG(3) << "EnqueueOutputRecord()";
1140  DCHECK(!free_output_buffers_.empty());
1141
1142  // Enqueue an output (VIDEO_CAPTURE) buffer.
1143  const int buffer = free_output_buffers_.front();
1144  OutputRecord& output_record = output_buffer_map_[buffer];
1145  DCHECK(!output_record.at_device);
1146  DCHECK(!output_record.at_client);
1147  DCHECK_NE(output_record.egl_image, EGL_NO_IMAGE_KHR);
1148  DCHECK_NE(output_record.picture_id, -1);
1149  if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
1150    TRACE_EVENT0("Video Decoder",
1151                 "V4L2VDA::EnqueueOutputRecord: eglClientWaitSyncKHR");
1152    // If we have to wait for completion, wait.  Note that
1153    // free_output_buffers_ is a FIFO queue, so we always wait on the
1154    // buffer that has been in the queue the longest.
1155    if (eglClientWaitSyncKHR(egl_display_, output_record.egl_sync, 0,
1156                             EGL_FOREVER_KHR) == EGL_FALSE) {
1157      // This will cause tearing, but is safe otherwise.
1158      DVLOG(1) << __func__ << " eglClientWaitSyncKHR failed!";
1159    }
1160    if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
1161      DLOG(FATAL) << __func__ << " eglDestroySyncKHR failed!";
1162      NOTIFY_ERROR(PLATFORM_FAILURE);
1163      return false;
1164    }
1165    output_record.egl_sync = EGL_NO_SYNC_KHR;
1166  }
1167  struct v4l2_buffer qbuf;
1168  scoped_ptr<struct v4l2_plane[]> qbuf_planes(
1169      new v4l2_plane[output_planes_count_]);
1170  memset(&qbuf, 0, sizeof(qbuf));
1171  memset(
1172      qbuf_planes.get(), 0, sizeof(struct v4l2_plane) * output_planes_count_);
1173  qbuf.index    = buffer;
1174  qbuf.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1175  qbuf.memory   = V4L2_MEMORY_MMAP;
1176  qbuf.m.planes = qbuf_planes.get();
1177  qbuf.length = output_planes_count_;
1178  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
1179  free_output_buffers_.pop();
1180  output_record.at_device = true;
1181  output_buffer_queued_count_++;
1182  return true;
1183}
1184
1185void V4L2VideoDecodeAccelerator::ReusePictureBufferTask(
1186    int32 picture_buffer_id, scoped_ptr<EGLSyncKHRRef> egl_sync_ref) {
1187  DVLOG(3) << "ReusePictureBufferTask(): picture_buffer_id="
1188           << picture_buffer_id;
1189  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1190  TRACE_EVENT0("Video Decoder", "V4L2VDA::ReusePictureBufferTask");
1191
1192  // We run ReusePictureBufferTask even if we're in kResetting.
1193  if (decoder_state_ == kError) {
1194    DVLOG(2) << "ReusePictureBufferTask(): early out: kError state";
1195    return;
1196  }
1197
1198  if (decoder_state_ == kChangingResolution) {
1199    DVLOG(2) << "ReusePictureBufferTask(): early out: kChangingResolution";
1200    return;
1201  }
1202
1203  size_t index;
1204  for (index = 0; index < output_buffer_map_.size(); ++index)
1205    if (output_buffer_map_[index].picture_id == picture_buffer_id)
1206      break;
1207
1208  if (index >= output_buffer_map_.size()) {
1209    // It's possible that we've already posted a DismissPictureBuffer for this
1210    // picture, but it has not yet executed when this ReusePictureBuffer was
1211    // posted to us by the client. In that case just ignore this (we've already
1212    // dismissed it and accounted for that) and let the sync object get
1213    // destroyed.
1214    DVLOG(4) << "ReusePictureBufferTask(): got picture id= "
1215             << picture_buffer_id << " not in use (anymore?).";
1216    return;
1217  }
1218
1219  OutputRecord& output_record = output_buffer_map_[index];
1220  if (output_record.at_device || !output_record.at_client) {
1221    DLOG(ERROR) << "ReusePictureBufferTask(): picture_buffer_id not reusable";
1222    NOTIFY_ERROR(INVALID_ARGUMENT);
1223    return;
1224  }
1225
1226  DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
1227  DCHECK(!output_record.at_device);
1228  output_record.at_client = false;
1229  output_record.egl_sync = egl_sync_ref->egl_sync;
1230  free_output_buffers_.push(index);
1231  decoder_frames_at_client_--;
1232  // Take ownership of the EGLSync.
1233  egl_sync_ref->egl_sync = EGL_NO_SYNC_KHR;
1234  // We got a buffer back, so enqueue it back.
1235  Enqueue();
1236}
1237
1238void V4L2VideoDecodeAccelerator::FlushTask() {
1239  DVLOG(3) << "FlushTask()";
1240  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1241  TRACE_EVENT0("Video Decoder", "V4L2VDA::FlushTask");
1242
1243  // Flush outstanding buffers.
1244  if (decoder_state_ == kInitialized || decoder_state_ == kAfterReset) {
1245    // There's nothing in the pipe, so return done immediately.
1246    DVLOG(3) << "FlushTask(): returning flush";
1247    child_message_loop_proxy_->PostTask(
1248        FROM_HERE, base::Bind(&Client::NotifyFlushDone, client_));
1249    return;
1250  } else if (decoder_state_ == kError) {
1251    DVLOG(2) << "FlushTask(): early out: kError state";
1252    return;
1253  }
1254
1255  // We don't support stacked flushing.
1256  DCHECK(!decoder_flushing_);
1257
1258  // Queue up an empty buffer -- this triggers the flush.
1259  decoder_input_queue_.push(
1260      linked_ptr<BitstreamBufferRef>(new BitstreamBufferRef(
1261          io_client_, io_message_loop_proxy_, NULL, 0, kFlushBufferId)));
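  // The NULL shm and kFlushBufferId mark this entry as the flush sentinel;
  // DecodeBufferTask() turns it into an empty input buffer queued to the
  // device (see the zero-size handling there).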
1262  decoder_flushing_ = true;
1263  SendPictureReady();  // Send all pending PictureReady.
1264
1265  ScheduleDecodeBufferTaskIfNeeded();
1266}
1267
1268void V4L2VideoDecodeAccelerator::NotifyFlushDoneIfNeeded() {
1269  if (!decoder_flushing_)
1270    return;
1271
1272  // Pipeline is empty when:
1273  // * Decoder input queue is empty of non-delayed buffers.
1274  // * There is no currently filling input buffer.
1275  // * Input holding queue is empty.
1276  // * All input (VIDEO_OUTPUT) buffers are returned.
1277  if (!decoder_input_queue_.empty()) {
1278    if (decoder_input_queue_.front()->input_id !=
1279        decoder_delay_bitstream_buffer_id_)
1280      return;
1281  }
1282  if (decoder_current_input_buffer_ != -1)
1283    return;
1284  if ((input_ready_queue_.size() + input_buffer_queued_count_) != 0)
1285    return;
1286
1287  // TODO(posciak): crbug.com/270039. Exynos requires a streamoff-streamon
1288  // sequence after flush to continue, even if we are not resetting. This would
1289  // make sense, because we don't really want to resume from a non-resume point
1290  // (e.g. not from an IDR) if we are flushed.
1291  // MSE player however triggers a Flush() on chunk end, but never Reset(). One
1292  // could argue either way, or even say that Flush() is not needed/harmful when
1293  // transitioning to next chunk.
1294  // For now, do the streamoff-streamon cycle to satisfy Exynos and not freeze
1295  // when doing MSE. This should be harmless otherwise.
1296  if (!StopDevicePoll(false))
1297    return;
1298
1299  if (!StartDevicePoll())
1300    return;
1301
1302  decoder_delay_bitstream_buffer_id_ = -1;
1303  decoder_flushing_ = false;
1304  DVLOG(3) << "NotifyFlushDoneIfNeeded(): returning flush";
1305  child_message_loop_proxy_->PostTask(
1306      FROM_HERE, base::Bind(&Client::NotifyFlushDone, client_));
1307
1308  // While we were flushing, we early-outed DecodeBufferTask()s.
1309  ScheduleDecodeBufferTaskIfNeeded();
1310}
1311
1312void V4L2VideoDecodeAccelerator::ResetTask() {
1313  DVLOG(3) << "ResetTask()";
1314  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1315  TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetTask");
1316
1317  if (decoder_state_ == kError) {
1318    DVLOG(2) << "ResetTask(): early out: kError state";
1319    return;
1320  }
1321
1322  // If we are in the middle of switching resolutions, postpone reset until
  // it's done. We don't have to worry about the timing of this with respect
  // to decoding, because the input pipe is already stopped if we are changing
  // resolution.
1325  // We will come back here after we are done with the resolution change.
1326  DCHECK(!resolution_change_reset_pending_);
1327  if (resolution_change_pending_ || decoder_state_ == kChangingResolution) {
1328    resolution_change_reset_pending_ = true;
1329    return;
1330  }
1331
1332  // We stop streaming and clear buffer tracking info (not preserving inputs).
1333  // StopDevicePoll() unconditionally does _not_ destroy buffers, however.
1334  if (!StopDevicePoll(false))
1335    return;
1336
1337  decoder_current_bitstream_buffer_.reset();
1338  while (!decoder_input_queue_.empty())
1339    decoder_input_queue_.pop();
1340
1341  decoder_current_input_buffer_ = -1;
1342
1343  // If we were flushing, we'll never return any more BitstreamBuffers or
1344  // PictureBuffers; they have all been dropped and returned by now.
1345  NotifyFlushDoneIfNeeded();
1346
1347  // Mark that we're resetting, then enqueue a ResetDoneTask().  All intervening
1348  // jobs will early-out in the kResetting state.
1349  decoder_state_ = kResetting;
1350  SendPictureReady();  // Send all pending PictureReady.
1351  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
1352      &V4L2VideoDecodeAccelerator::ResetDoneTask, base::Unretained(this)));
1353}
1354
1355void V4L2VideoDecodeAccelerator::ResetDoneTask() {
1356  DVLOG(3) << "ResetDoneTask()";
1357  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
1358  TRACE_EVENT0("Video Decoder", "V4L2VDA::ResetDoneTask");
1359
1360  if (decoder_state_ == kError) {
1361    DVLOG(2) << "ResetDoneTask(): early out: kError state";
1362    return;
1363  }
1364
1365  if (!StartDevicePoll())
1366    return;
1367
1368  // We might have received a resolution change event while we were waiting
1369  // for the reset to finish. The codec will not post another event if the
  // resolution after reset remains the same as the one to which we were just
  // about to switch, so preserve the event across reset so we can address
1372  // it after resuming.

  // Reset format-specific bits.
  if (video_profile_ >= media::H264PROFILE_MIN &&
      video_profile_ <= media::H264PROFILE_MAX) {
    decoder_h264_parser_.reset(new media::H264Parser());
  }

  // Jobs drained, we're finished resetting.
  DCHECK_EQ(decoder_state_, kResetting);
  if (output_buffer_map_.empty()) {
    // We must have gotten Reset() before we had a chance to request buffers
    // from the client.
    decoder_state_ = kInitialized;
  } else {
    decoder_state_ = kAfterReset;
  }

  decoder_partial_frame_pending_ = false;
  decoder_delay_bitstream_buffer_id_ = -1;
  child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
      &Client::NotifyResetDone, client_));

  // While we were resetting, we early-outed DecodeBufferTask()s.
  ScheduleDecodeBufferTaskIfNeeded();
}

void V4L2VideoDecodeAccelerator::DestroyTask() {
  DVLOG(3) << "DestroyTask()";
  TRACE_EVENT0("Video Decoder", "V4L2VDA::DestroyTask");

  // DestroyTask() should run regardless of decoder_state_.

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll(false);

  decoder_current_bitstream_buffer_.reset();
  decoder_current_input_buffer_ = -1;
  decoder_decode_buffer_tasks_scheduled_ = 0;
  decoder_frames_at_client_ = 0;
  while (!decoder_input_queue_.empty())
    decoder_input_queue_.pop();
  decoder_flushing_ = false;

  // Set our state to kError.  Just in case.
  decoder_state_ = kError;
}

bool V4L2VideoDecodeAccelerator::StartDevicePoll() {
  DVLOG(3) << "StartDevicePoll()";
  DCHECK(!device_poll_thread_.IsRunning());
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());

  // Start up the device poll thread and schedule its first DevicePollTask().
  if (!device_poll_thread_.Start()) {
    DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  device_poll_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::DevicePollTask,
      base::Unretained(this),
      0));

  return true;
}

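// Stops the device poll thread and turns off streaming. If |keep_input_state|
// is true, the input (OUTPUT_MPLANE) queue is left streaming and its buffer
// accounting is preserved; this is used during a resolution change, when only
// the output (CAPTURE_MPLANE) side needs to be torn down.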
bool V4L2VideoDecodeAccelerator::StopDevicePoll(bool keep_input_state) {
  DVLOG(3) << "StopDevicePoll()";
  if (decoder_thread_.IsRunning())
    DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!device_->SetDevicePollInterrupt()) {
    DPLOG(ERROR) << "SetDevicePollInterrupt(): failed";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }
  device_poll_thread_.Stop();
  // Clear the interrupt now, to be sure.
  if (!device_->ClearDevicePollInterrupt()) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return false;
  }

  // Stop streaming.
  if (!keep_input_state) {
    if (input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
    }
    input_streamon_ = false;
  }
  if (output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  output_streamon_ = false;

  // Reset all our accounting info.
  if (!keep_input_state) {
    while (!input_ready_queue_.empty())
      input_ready_queue_.pop();
    free_input_buffers_.clear();
    for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
      free_input_buffers_.push_back(i);
      input_buffer_map_[i].at_device = false;
      input_buffer_map_[i].bytes_used = 0;
      input_buffer_map_[i].input_id = -1;
    }
    input_buffer_queued_count_ = 0;
  }

  while (!free_output_buffers_.empty())
    free_output_buffers_.pop();

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    DCHECK(!(output_record.at_client && output_record.at_device));

    // After streamoff, the device drops ownership of all buffers, even if
    // we don't dequeue them explicitly.
    output_buffer_map_[i].at_device = false;
    // Some of them may still be owned by the client however.
    // Reuse only those that aren't.
    if (!output_record.at_client) {
      DCHECK_EQ(output_record.egl_sync, EGL_NO_SYNC_KHR);
      free_output_buffers_.push(i);
    }
  }
  output_buffer_queued_count_ = 0;

  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}

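// Called on the decoder thread once pending work has drained. If a resolution
// change event is pending, stops device polling (keeping the input queue) and
// posts output buffer teardown to the child thread.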
void V4L2VideoDecodeAccelerator::StartResolutionChangeIfNeeded() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(decoder_state_, kUninitialized);
  DCHECK_NE(decoder_state_, kResetting);

  if (!resolution_change_pending_)
    return;

  DVLOG(3) << "No more work, initiate resolution change";

  // Keep input queue.
  if (!StopDevicePoll(true))
    return;

  decoder_state_ = kChangingResolution;
  DCHECK(resolution_change_pending_);
  resolution_change_pending_ = false;

  // Post a task to clean up buffers on the child thread. This also ensures
  // that we no longer accept ReusePictureBuffer() after this point.
  child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers,
      weak_this_));
}

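// Runs on the decoder thread after the child thread has destroyed the old
// output buffers: queries the new coded format from the driver, allocates
// output buffers for it, then resumes decoding (or services a pending Reset).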
void V4L2VideoDecodeAccelerator::FinishResolutionChange() {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_EQ(decoder_state_, kChangingResolution);
  DVLOG(3) << "FinishResolutionChange()";

  if (decoder_state_ == kError) {
    DVLOG(2) << "FinishResolutionChange(): early out: kError state";
    return;
  }

  struct v4l2_format format;
  bool again;
  bool ret = GetFormatInfo(&format, &again);
  if (!ret || again) {
    DVLOG(3) << "Couldn't get format information after resolution change";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  if (!CreateBuffersForFormat(format)) {
    DVLOG(3) << "Couldn't reallocate buffers after resolution change";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  decoder_state_ = kDecoding;

  if (resolution_change_reset_pending_) {
    resolution_change_reset_pending_ = false;
    ResetTask();
    return;
  }

  if (!StartDevicePoll())
    return;

  Enqueue();
  ScheduleDecodeBufferTaskIfNeeded();
}

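// Runs on device_poll_thread_. Blocks in device_->Poll() until the device or
// the poll interrupt signals activity, then posts ServiceDeviceTask() back to
// the decoder thread; no decoder state is touched from this thread.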
void V4L2VideoDecodeAccelerator::DevicePollTask(bool poll_device) {
  DVLOG(3) << "DevicePollTask()";
  DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());
  TRACE_EVENT0("Video Decoder", "V4L2VDA::DevicePollTask");

  bool event_pending = false;

  if (!device_->Poll(poll_device, &event_pending)) {
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch decoder state from this thread.
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::ServiceDeviceTask,
      base::Unretained(this), event_pending));
}

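// Reports |error| to the client. Safe to call from any thread; re-posts
// itself to the child thread if needed before touching |client_|.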
void V4L2VideoDecodeAccelerator::NotifyError(Error error) {
  DVLOG(2) << "NotifyError()";

  if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
    child_message_loop_proxy_->PostTask(FROM_HERE, base::Bind(
        &V4L2VideoDecodeAccelerator::NotifyError, weak_this_, error));
    return;
  }

  if (client_) {
    client_->NotifyError(error);
    client_ptr_factory_.reset();
  }
}

void V4L2VideoDecodeAccelerator::SetDecoderState(State state) {
  DVLOG(3) << "SetDecoderState(): state=" << state;

  // We can touch decoder_state_ only if this is the decoder thread or the
  // decoder thread isn't running.
  if (decoder_thread_.message_loop() != NULL &&
      decoder_thread_.message_loop() != base::MessageLoop::current()) {
    decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &V4L2VideoDecodeAccelerator::SetDecoderState,
        base::Unretained(this), state));
  } else {
    decoder_state_ = state;
  }
}

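// Queries the current format on the CAPTURE (decoded frame) queue. Sets
// |*again| when the driver has not yet parsed enough of the stream to report
// a format (VIDIOC_G_FMT fails with EINVAL), which is not treated as an
// error.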
bool V4L2VideoDecodeAccelerator::GetFormatInfo(struct v4l2_format* format,
                                               bool* again) {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());

  *again = false;
  memset(format, 0, sizeof(*format));
  format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  if (device_->Ioctl(VIDIOC_G_FMT, format) != 0) {
    if (errno == EINVAL) {
      // EINVAL means we haven't seen sufficient stream to decode the format.
      *again = true;
      return true;
    } else {
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: VIDIOC_G_FMT";
      NOTIFY_ERROR(PLATFORM_FAILURE);
      return false;
    }
  }

  return true;
}

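// Records the plane count and coded size reported by the driver, then
// allocates output buffers to match.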
bool V4L2VideoDecodeAccelerator::CreateBuffersForFormat(
    const struct v4l2_format& format) {
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  output_planes_count_ = format.fmt.pix_mp.num_planes;
  frame_buffer_size_.SetSize(
      format.fmt.pix_mp.width, format.fmt.pix_mp.height);
  DVLOG(3) << "CreateBuffersForFormat(): new resolution: "
           << frame_buffer_size_.ToString();

  if (!CreateOutputBuffers())
    return false;

  return true;
}

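// Sets up the OUTPUT (bitstream input) queue: configures the input pixel
// format for the current profile, requests kInputBufferCount MMAP buffers,
// and mmap()s each one so bitstream data can be copied into it.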
bool V4L2VideoDecodeAccelerator::CreateInputBuffers() {
  DVLOG(3) << "CreateInputBuffers()";
  // We always run this as we prepare to initialize.
  DCHECK_EQ(decoder_state_, kUninitialized);
  DCHECK(!input_streamon_);
  DCHECK(input_buffer_map_.empty());

  __u32 pixelformat = V4L2Device::VideoCodecProfileToV4L2PixFmt(video_profile_);
  if (!pixelformat) {
    NOTREACHED();
    return false;
  }

  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type                              = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.pixelformat            = pixelformat;
  if (CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kIgnoreResolutionLimitsForAcceleratedVideoDecode))
    format.fmt.pix_mp.plane_fmt[0].sizeimage = kInputBufferMaxSizeFor4k;
  else
    format.fmt.pix_mp.plane_fmt[0].sizeimage = kInputBufferMaxSizeFor1080p;
  format.fmt.pix_mp.num_planes             = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count  = kInputBufferCount;
  reqbufs.type   = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);
  input_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    free_input_buffers_.push_back(i);

    // Query for the MEMORY_MMAP pointer.
    struct v4l2_plane planes[1];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index    = i;
    buffer.type     = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    buffer.memory   = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length   = 1;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL,
                                  buffer.m.planes[0].length,
                                  PROT_READ | PROT_WRITE,
                                  MAP_SHARED,
                                  buffer.m.planes[0].m.mem_offset);
    if (address == MAP_FAILED) {
      DPLOG(ERROR) << "CreateInputBuffers(): mmap() failed";
      return false;
    }
    input_buffer_map_[i].address = address;
    input_buffer_map_[i].length = buffer.m.planes[0].length;
  }

  return true;
}

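// Sets up the CAPTURE (decoded frame) queue: queries the driver for the
// minimum number of buffers it needs, requests that many plus
// kDpbOutputBufferExtraCount, asks the client for matching PictureBuffers,
// and blocks until AssignPictureBuffers() signals |pictures_assigned_|.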
bool V4L2VideoDecodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << "CreateOutputBuffers()";
  DCHECK(decoder_state_ == kInitialized ||
         decoder_state_ == kChangingResolution);
  DCHECK(!output_streamon_);
  DCHECK(output_buffer_map_.empty());

  // Number of output buffers we need.
  struct v4l2_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
  output_dpb_size_ = ctrl.value;

  // Output format setup in Initialize().

  // Allocate the output buffers.
  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count  = output_dpb_size_ + kDpbOutputBufferExtraCount;
  reqbufs.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  output_buffer_map_.resize(reqbufs.count);

  DVLOG(3) << "CreateOutputBuffers(): ProvidePictureBuffers(): "
           << "buffer_count=" << output_buffer_map_.size()
           << ", width=" << frame_buffer_size_.width()
           << ", height=" << frame_buffer_size_.height();
  child_message_loop_proxy_->PostTask(FROM_HERE,
                                      base::Bind(&Client::ProvidePictureBuffers,
                                                 client_,
                                                 output_buffer_map_.size(),
                                                 frame_buffer_size_,
                                                 device_->GetTextureTarget()));

  // Wait for the client to call AssignPictureBuffers() on the Child thread.
  // We do this because if we continue decoding without finishing buffer
  // allocation, we may end up Resetting before AssignPictureBuffers arrives,
  // resulting in unnecessary complications and subtle bugs.
  // For example, if the client calls Decode(Input1), Reset(), Decode(Input2)
  // in a sequence, and Decode(Input1) results in us getting here and exiting
  // without waiting, we might end up running Reset{,Done}Task() before
  // AssignPictureBuffers is scheduled, thus cleaning up and pushing buffers
  // to the free_output_buffers_ queue twice. If we somehow marked buffers as
  // not ready, we'd need special handling for restarting the second Decode
  // task and delaying it anyway.
  // Waiting here is not very costly and makes reasoning about different
  // situations much simpler.
  pictures_assigned_.Wait();

  Enqueue();
  return true;
}

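// Unmaps all input buffers and releases them in the driver (REQBUFS with a
// count of zero frees the buffers).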
void V4L2VideoDecodeAccelerator::DestroyInputBuffers() {
  DVLOG(3) << "DestroyInputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);

  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    if (input_buffer_map_[i].address != NULL) {
      device_->Munmap(input_buffer_map_[i].address,
                      input_buffer_map_[i].length);
    }
  }

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  input_buffer_map_.clear();
  free_input_buffers_.clear();
}

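// Destroys the EGLImage and EGLSync attached to each output record, dismisses
// the corresponding PictureBuffers with the client, and releases the CAPTURE
// buffers in the driver. Attempts every step and returns false if any failed.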
bool V4L2VideoDecodeAccelerator::DestroyOutputBuffers() {
  DVLOG(3) << "DestroyOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);
  bool success = true;

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];

    if (output_record.egl_image != EGL_NO_IMAGE_KHR) {
      if (device_->DestroyEGLImage(egl_display_, output_record.egl_image) !=
          EGL_TRUE) {
        DVLOG(1) << __func__ << " DestroyEGLImage failed.";
        success = false;
      }
    }

    if (output_record.egl_sync != EGL_NO_SYNC_KHR) {
      if (eglDestroySyncKHR(egl_display_, output_record.egl_sync) != EGL_TRUE) {
        DVLOG(1) << __func__ << " eglDestroySyncKHR failed.";
        success = false;
      }
    }

    DVLOG(1) << "DestroyOutputBuffers(): dismissing PictureBuffer id="
             << output_record.picture_id;
    child_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(
            &Client::DismissPictureBuffer, client_, output_record.picture_id));
  }

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  if (device_->Ioctl(VIDIOC_REQBUFS, &reqbufs) != 0) {
    DPLOG(ERROR) << "DestroyOutputBuffers() ioctl() failed: VIDIOC_REQBUFS";
    success = false;
  }

  output_buffer_map_.clear();
  while (!free_output_buffers_.empty())
    free_output_buffers_.pop();

  return success;
}

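// Child-thread half of a resolution change: destroys the old output buffers,
// then posts FinishResolutionChange() back to the decoder thread to allocate
// buffers for the new format.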
void V4L2VideoDecodeAccelerator::ResolutionChangeDestroyBuffers() {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "ResolutionChangeDestroyBuffers()";

  if (!DestroyOutputBuffers()) {
    DLOG(FATAL) << __func__ << " Failed destroying output buffers.";
    NOTIFY_ERROR(PLATFORM_FAILURE);
    return;
  }

  // Finish resolution change on decoder thread.
  decoder_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &V4L2VideoDecodeAccelerator::FinishResolutionChange,
      base::Unretained(this)));
}

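// Delivers pending PictureReady callbacks in order: pictures already cleared
// go straight to the IO thread for lower latency, uncleared ones are routed
// through the child thread to be cleared first, and cleared pictures queued
// behind an uncleared one are held back until the pending clears finish.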
void V4L2VideoDecodeAccelerator::SendPictureReady() {
  DVLOG(3) << "SendPictureReady()";
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  bool resetting_or_flushing =
      (decoder_state_ == kResetting || decoder_flushing_);
  while (pending_picture_ready_.size() > 0) {
    bool cleared = pending_picture_ready_.front().cleared;
    const media::Picture& picture = pending_picture_ready_.front().picture;
    if (cleared && picture_clearing_count_ == 0) {
      // This picture is cleared. Post it to IO thread to reduce latency. This
      // should be the case after all pictures are cleared at the beginning.
      io_message_loop_proxy_->PostTask(
          FROM_HERE, base::Bind(&Client::PictureReady, io_client_, picture));
      pending_picture_ready_.pop();
    } else if (!cleared || resetting_or_flushing) {
      DVLOG(3) << "SendPictureReady()"
               << ". cleared=" << pending_picture_ready_.front().cleared
               << ", decoder_state_=" << decoder_state_
               << ", decoder_flushing_=" << decoder_flushing_
               << ", picture_clearing_count_=" << picture_clearing_count_;
      // If the picture is not cleared, post it to the child thread because it
      // has to be cleared there. A picture only needs to be cleared once. If
      // the decoder is resetting or flushing, send all pictures this way to
      // ensure every PictureReady arrives before the reset or flush is
      // reported done.
      child_message_loop_proxy_->PostTaskAndReply(
          FROM_HERE,
          base::Bind(&Client::PictureReady, client_, picture),
          // Unretained is safe: if Client::PictureReady gets to run, |this| is
          // still alive, because Destroy() waits for the decoder thread to
          // finish.
          base::Bind(&V4L2VideoDecodeAccelerator::PictureCleared,
                     base::Unretained(this)));
      picture_clearing_count_++;
      pending_picture_ready_.pop();
    } else {
      // This picture is cleared. But some pictures are about to be cleared on
      // the child thread. To preserve the order, do not send this until those
      // pictures are cleared.
      break;
    }
  }
}

void V4L2VideoDecodeAccelerator::PictureCleared() {
  DVLOG(3) << "PictureCleared(). clearing count=" << picture_clearing_count_;
  DCHECK_EQ(decoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_GT(picture_clearing_count_, 0);
  picture_clearing_count_--;
  SendPictureReady();
}

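// Returns true when the driver's minimum CAPTURE buffer count or coded frame
// size no longer matches the currently allocated output buffers, i.e. the
// output side must be torn down and recreated.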
bool V4L2VideoDecodeAccelerator::IsResolutionChangeNecessary() {
  DVLOG(3) << "IsResolutionChangeNecessary() ";

  struct v4l2_control ctrl;
  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_G_CTRL, &ctrl);
  if (ctrl.value != output_dpb_size_) {
    DVLOG(3)
        << "IsResolutionChangeNecessary(): Returning true since DPB mismatch ";
    return true;
  }
  struct v4l2_format format;
  bool again = false;
  bool ret = GetFormatInfo(&format, &again);
  if (!ret || again) {
    DVLOG(3) << "IsResolutionChangeNecessary(): GetFormatInfo() failed";
    return false;
  }
  gfx::Size new_size(base::checked_cast<int>(format.fmt.pix_mp.width),
                     base::checked_cast<int>(format.fmt.pix_mp.height));
  if (frame_buffer_size_ != new_size) {
    DVLOG(3) << "IsResolutionChangeNecessary(): Resolution change detected";
    return true;
  }
  return false;
}

}  // namespace content