// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "base/callback.h"
#include "base/command_line.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/numerics/safe_conversions.h"
#include "content/common/gpu/media/v4l2_video_encode_accelerator.h"
#include "content/public/common/content_switches.h"
#include "media/base/bitstream_buffer.h"

#define NOTIFY_ERROR(x)                            \
  do {                                             \
    SetEncoderState(kError);                       \
    DLOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x);                                \
  } while (0)

#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value)              \
  do {                                                             \
    if (device_->Ioctl(type, arg) != 0) {                          \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
      NOTIFY_ERROR(kPlatformFailureError);                         \
      return value;                                                \
    }                                                              \
  } while (0)

#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))

#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)

#define IOCTL_OR_LOG_ERROR(type, arg)                              \
  do {                                                             \
    if (device_->Ioctl(type, arg) != 0)                            \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
  } while (0)

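// Note on V4L2 terminology: the encoder is a memory-to-memory device, so raw
// frames are fed to it through the V4L2 OUTPUT queue
// (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) and the encoded bitstream comes back on
// the CAPTURE queue (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE).  Throughout this
// file, "input" buffers are OUTPUT-queue buffers and "output" buffers are
// CAPTURE-queue buffers.
//
// Threading: public entry points run on the child thread that created this
// object (see |child_message_loop_proxy_|); the encode work itself runs on
// |encoder_thread_|, and |device_poll_thread_| only blocks in poll() on the
// device and the interrupt fd.
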
namespace content {

struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
};

V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {
}

V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord()
    : at_device(false), address(NULL), length(0) {
}

V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator(
    scoped_ptr<V4L2Device> device)
    : child_message_loop_proxy_(base::MessageLoopProxy::current()),
      weak_this_ptr_factory_(this),
      weak_this_(weak_this_ptr_factory_.GetWeakPtr()),
      output_buffer_byte_size_(0),
      device_input_format_(media::VideoFrame::UNKNOWN),
      input_planes_count_(0),
      output_format_fourcc_(0),
      encoder_thread_("V4L2EncoderThread"),
      encoder_state_(kUninitialized),
      stream_header_size_(0),
      device_(device.Pass()),
      input_streamon_(false),
      input_buffer_queued_count_(0),
      input_memory_type_(V4L2_MEMORY_USERPTR),
      output_streamon_(false),
      output_buffer_queued_count_(0),
      device_poll_thread_("V4L2EncoderDevicePollThread") {
}

V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() {
  DCHECK(!encoder_thread_.IsRunning());
  DCHECK(!device_poll_thread_.IsRunning());
  DVLOG(4) << __func__;

  DestroyInputBuffers();
  DestroyOutputBuffers();
}

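// Initialize() runs on the child thread: it queries device capabilities,
// negotiates input/output formats (creating a V4L2ImageProcessor when the
// device cannot consume |input_format| directly), sets codec controls,
// allocates output (CAPTURE) buffers and starts |encoder_thread_|.  Input
// (OUTPUT) buffers are allocated later, once the client has provided
// bitstream buffers; see UseOutputBitstreamBufferTask().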
bool V4L2VideoEncodeAccelerator::Initialize(
    media::VideoFrame::Format input_format,
    const gfx::Size& input_visible_size,
    media::VideoCodecProfile output_profile,
    uint32 initial_bitrate,
    Client* client) {
  DVLOG(3) << __func__ << ": input_format="
           << media::VideoFrame::FormatToString(input_format)
           << ", input_visible_size=" << input_visible_size.ToString()
           << ", output_profile=" << output_profile
           << ", initial_bitrate=" << initial_bitrate;

  visible_size_ = input_visible_size;

  client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client));
  client_ = client_ptr_factory_->GetWeakPtr();

  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK_EQ(encoder_state_, kUninitialized);

  struct v4l2_capability caps;
  memset(&caps, 0, sizeof(caps));
  const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE |
                              V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps);
  if ((caps.capabilities & kCapsRequired) != kCapsRequired) {
    DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: "
                   "caps check failed: 0x" << std::hex << caps.capabilities;
    return false;
  }

  if (!SetFormats(input_format, output_profile)) {
    DLOG(ERROR) << "Failed setting up formats";
    return false;
  }

  if (input_format != device_input_format_) {
    DVLOG(1) << "Input format not supported by the HW, will convert to "
             << media::VideoFrame::FormatToString(device_input_format_);

    scoped_ptr<V4L2Device> device =
        V4L2Device::Create(V4L2Device::kImageProcessor);
    image_processor_.reset(new V4L2ImageProcessor(device.Pass()));

    // Convert from input_format to device_input_format_, keeping the size
    // at visible_size_ and requiring the output buffers to be of at least
    // input_allocated_size_.
    if (!image_processor_->Initialize(
            input_format,
            device_input_format_,
            visible_size_,
            visible_size_,
            input_allocated_size_,
            base::Bind(&V4L2VideoEncodeAccelerator::ImageProcessorError,
                       weak_this_))) {
      DLOG(ERROR) << "Failed initializing image processor";
      return false;
    }
  }

  if (!InitControls())
    return false;

  if (!CreateOutputBuffers())
    return false;

  if (!encoder_thread_.Start()) {
    DLOG(ERROR) << "Initialize(): encoder thread failed to start";
    return false;
  }

  RequestEncodingParametersChange(initial_bitrate, kInitialFramerate);

  SetEncoderState(kInitialized);

  child_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&Client::RequireBitstreamBuffers,
                 client_,
                 kInputBufferCount,
                 image_processor_.get() ?
                     image_processor_->input_allocated_size() :
                     input_allocated_size_,
                 output_buffer_byte_size_));
  return true;
}

void V4L2VideoEncodeAccelerator::ImageProcessorError() {
  DVLOG(1) << "Image processor error";
  NOTIFY_ERROR(kPlatformFailureError);
}

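// Encode() runs on the child thread.  When an image processor is in use, the
// frame is converted first and FrameProcessed() forwards the result;
// otherwise the frame is posted directly to EncodeTask() on the encoder
// thread.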
void V4L2VideoEncodeAccelerator::Encode(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (image_processor_) {
    image_processor_->Process(
        frame,
        base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed,
                   weak_this_,
                   force_keyframe));
  } else {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
                   base::Unretained(this),
                   frame,
                   force_keyframe));
  }
}

void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer(
    const media::BitstreamBuffer& buffer) {
  DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id();
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  if (buffer.size() < output_buffer_byte_size_) {
    NOTIFY_ERROR(kInvalidArgumentError);
    return;
  }

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(buffer.handle(), false));
  if (!shm->Map(buffer.size())) {
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  scoped_ptr<BitstreamBufferRef> buffer_ref(
      new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size()));
  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask,
                 base::Unretained(this),
                 base::Passed(&buffer_ref)));
}

void V4L2VideoEncodeAccelerator::RequestEncodingParametersChange(
    uint32 bitrate,
    uint32 framerate) {
  DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(
          &V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask,
          base::Unretained(this),
          bitrate,
          framerate));
}

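// Destroy() runs DestroyTask() on |encoder_thread_| (or directly if the
// thread never started) and joins the thread before deleting |this|; this is
// why tasks posted to |encoder_thread_| may safely use base::Unretained(this).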
void V4L2VideoEncodeAccelerator::Destroy() {
  DVLOG(3) << "Destroy()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());

  // We're destroying; cancel all callbacks.
  client_ptr_factory_.reset();

  if (image_processor_.get())
    image_processor_.release()->Destroy();

  // If the encoder thread is running, destroy using posted task.
  if (encoder_thread_.IsRunning()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::DestroyTask,
                   base::Unretained(this)));
    // DestroyTask() will put the encoder into kError state and cause all tasks
    // to no-op.
    encoder_thread_.Stop();
  } else {
    // Otherwise, call the destroy task directly.
    DestroyTask();
  }

  // Set to kError state just in case.
  SetEncoderState(kError);

  delete this;
}

// static
std::vector<media::VideoEncodeAccelerator::SupportedProfile>
V4L2VideoEncodeAccelerator::GetSupportedProfiles() {
  std::vector<SupportedProfile> profiles;
  SupportedProfile profile;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  if (cmd_line->HasSwitch(switches::kEnableWebRtcHWVp8Encoding)) {
    profile.profile = media::VP8PROFILE_MAIN;
    profile.max_resolution.SetSize(1920, 1088);
    profile.max_framerate.numerator = 30;
    profile.max_framerate.denominator = 1;
    profiles.push_back(profile);
  } else {
    profile.profile = media::H264PROFILE_MAIN;
    profile.max_resolution.SetSize(1920, 1088);
    profile.max_framerate.numerator = 30;
    profile.max_framerate.denominator = 1;
    profiles.push_back(profile);
  }

  return profiles;
}

void V4L2VideoEncodeAccelerator::FrameProcessed(
    bool force_keyframe,
    const scoped_refptr<media::VideoFrame>& frame) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe;

  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask,
                 base::Unretained(this),
                 frame,
                 force_keyframe));
}

void V4L2VideoEncodeAccelerator::EncodeTask(
    const scoped_refptr<media::VideoFrame>& frame,
    bool force_keyframe) {
  DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "EncodeTask(): early out: kError state";
    return;
  }

  encoder_input_queue_.push_back(frame);
  Enqueue();

  if (force_keyframe) {
    // TODO(posciak): this presently makes for slightly imprecise encoding
    // parameters updates.  To precisely align the parameter updates with the
    // incoming input frame, we should queue the parameters together with the
    // frame onto encoder_input_queue_ and apply them when the input is about
    // to be queued to the codec.
    struct v4l2_ext_control ctrls[1];
    struct v4l2_ext_controls control;
    memset(&ctrls, 0, sizeof(ctrls));
    memset(&control, 0, sizeof(control));
    ctrls[0].id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE;
    ctrls[0].value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME;
    control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
    control.count = 1;
    control.controls = ctrls;
    IOCTL_OR_ERROR_RETURN(VIDIOC_S_EXT_CTRLS, &control);
  }
}

void V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask(
    scoped_ptr<BitstreamBufferRef> buffer_ref) {
  DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  encoder_output_queue_.push_back(
      linked_ptr<BitstreamBufferRef>(buffer_ref.release()));
  Enqueue();

  if (encoder_state_ == kInitialized) {
    // Finish setting up our OUTPUT queue.  See: Initialize().
    // VIDIOC_REQBUFS on OUTPUT queue.
    if (!CreateInputBuffers())
      return;
    if (!StartDevicePoll())
      return;
    encoder_state_ = kEncoding;
  }
}

void V4L2VideoEncodeAccelerator::DestroyTask() {
  DVLOG(3) << "DestroyTask()";

  // DestroyTask() should run regardless of encoder_state_.

  // Stop streaming and the device_poll_thread_.
  StopDevicePoll();

  // Set our state to kError, and early-out all tasks.
  encoder_state_ = kError;
}

void V4L2VideoEncodeAccelerator::ServiceDeviceTask() {
  DVLOG(3) << "ServiceDeviceTask()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK_NE(encoder_state_, kUninitialized);
  DCHECK_NE(encoder_state_, kInitialized);

  if (encoder_state_ == kError) {
    DVLOG(2) << "ServiceDeviceTask(): early out: kError state";
    return;
  }

  Dequeue();
  Enqueue();

  // Clear the interrupt fd.
  if (!device_->ClearDevicePollInterrupt())
    return;

  // Device can be polled as soon as either input or output buffers are queued.
  bool poll_device =
      (input_buffer_queued_count_ + output_buffer_queued_count_ > 0);

  // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(),
  // so either:
  // * device_poll_thread_ is running normally
  // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down,
  //   in which case we're in kError state, and we should have early-outed
  //   already.
  DCHECK(device_poll_thread_.message_loop());
  // Queue the DevicePollTask() now.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 poll_device));

  DVLOG(2) << __func__ << ": buffer counts: ENC["
           << encoder_input_queue_.size() << "] => DEVICE["
           << free_input_buffers_.size() << "+"
           << input_buffer_queued_count_ << "/"
           << input_buffer_map_.size() << "->"
           << free_output_buffers_.size() << "+"
           << output_buffer_queued_count_ << "/"
           << output_buffer_map_.size() << "] => OUT["
           << encoder_output_queue_.size() << "]";
}

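// Enqueue() queues as many input and output buffers as it can with
// VIDIOC_QBUF, signals the device poll interrupt when a queue transitions
// from empty to non-empty, and starts streaming (VIDIOC_STREAMON) lazily the
// first time each queue has buffers queued.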
void V4L2VideoEncodeAccelerator::Enqueue() {
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  DVLOG(3) << "Enqueue(): free_input_buffers: " << free_input_buffers_.size()
           << ", input_queue: " << encoder_input_queue_.size();

  // Enqueue all the inputs we can.
  const int old_inputs_queued = input_buffer_queued_count_;
  while (!encoder_input_queue_.empty() && !free_input_buffers_.empty()) {
    if (!EnqueueInputRecord())
      return;
  }
  if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!input_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      input_streamon_ = true;
    }
  }

  // Enqueue all the outputs we can.
  const int old_outputs_queued = output_buffer_queued_count_;
  while (!free_output_buffers_.empty() && !encoder_output_queue_.empty()) {
    if (!EnqueueOutputRecord())
      return;
  }
  if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) {
    // We just started up a previously empty queue.
    // Queue state changed; signal interrupt.
    if (!device_->SetDevicePollInterrupt())
      return;
    // Start VIDIOC_STREAMON if we haven't yet.
    if (!output_streamon_) {
      __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
      IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type);
      output_streamon_ = true;
    }
  }
}

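// Dequeue() reclaims completed buffers from both queues.  For H.264 the
// stream header arrives in a buffer of its own (the header mode is set to
// V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE in InitControls()); it is cached on
// first dequeue and prepended to every keyframe so that each keyframe is
// independently decodable.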
void V4L2VideoEncodeAccelerator::Dequeue() {
  DVLOG(3) << "Dequeue()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free
  // list.
  struct v4l2_buffer dqbuf;
  struct v4l2_plane planes[VIDEO_MAX_PLANES];
  while (input_buffer_queued_count_ > 0) {
    DVLOG(4) << "inputs queued: " << input_buffer_queued_count_;
    DCHECK(input_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(&planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = input_planes_count_;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    InputRecord& input_record = input_buffer_map_[dqbuf.index];
    DCHECK(input_record.at_device);
    input_record.at_device = false;

    input_record.frame = NULL;
    free_input_buffers_.push_back(dqbuf.index);
    input_buffer_queued_count_--;
  }

  // Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the
  // free list.  Notify the client that an output buffer is complete.
  while (output_buffer_queued_count_ > 0) {
    DCHECK(output_streamon_);
    memset(&dqbuf, 0, sizeof(dqbuf));
    memset(planes, 0, sizeof(planes));
    dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    dqbuf.memory = V4L2_MEMORY_MMAP;
    dqbuf.m.planes = planes;
    dqbuf.length = 1;
    if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) {
      if (errno == EAGAIN) {
        // EAGAIN if we're just out of buffers to dequeue.
        break;
      }
      DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF";
      NOTIFY_ERROR(kPlatformFailureError);
      return;
    }
    const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0);
    OutputRecord& output_record = output_buffer_map_[dqbuf.index];
    DCHECK(output_record.at_device);
    DCHECK(output_record.buffer_ref.get());

    void* output_data = output_record.address;
    size_t output_size = dqbuf.m.planes[0].bytesused;
    // This shouldn't happen, but just in case. We should be able to recover
    // after the next keyframe, at the cost of some visible corruption.
    DCHECK_LE(output_size, output_buffer_byte_size_);
    if (output_size > output_buffer_byte_size_)
      output_size = output_buffer_byte_size_;
    uint8* target_data =
        reinterpret_cast<uint8*>(output_record.buffer_ref->shm->memory());
    if (output_format_fourcc_ == V4L2_PIX_FMT_H264) {
      if (stream_header_size_ == 0) {
        // Assume that the first buffer dequeued is the stream header.
        stream_header_size_ = output_size;
        stream_header_.reset(new uint8[stream_header_size_]);
        memcpy(stream_header_.get(), output_data, stream_header_size_);
      }
      if (key_frame &&
          output_buffer_byte_size_ - stream_header_size_ >= output_size) {
        // Insert stream header before every keyframe.
        memcpy(target_data, stream_header_.get(), stream_header_size_);
        memcpy(target_data + stream_header_size_, output_data, output_size);
        output_size += stream_header_size_;
      } else {
        // Not a keyframe, or no room to prepend the header; return the
        // encoded data as-is.
        memcpy(target_data, output_data, output_size);
      }
    } else {
      memcpy(target_data, output_data, output_size);
    }

    DVLOG(3) << "Dequeue(): returning "
                "bitstream_buffer_id=" << output_record.buffer_ref->id
             << ", size=" << output_size << ", key_frame=" << key_frame;
    child_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&Client::BitstreamBufferReady,
                   client_,
                   output_record.buffer_ref->id,
                   output_size,
                   key_frame));
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(dqbuf.index);
    output_buffer_queued_count_--;
  }
}

bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
  DVLOG(3) << "EnqueueInputRecord()";
  DCHECK(!free_input_buffers_.empty());
  DCHECK(!encoder_input_queue_.empty());

  // Enqueue an input (VIDEO_OUTPUT) buffer.
  scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front();
  const int index = free_input_buffers_.back();
  InputRecord& input_record = input_buffer_map_[index];
  DCHECK(!input_record.at_device);
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  qbuf.m.planes = qbuf_planes;

  DCHECK_EQ(device_input_format_, frame->format());
  for (size_t i = 0; i < input_planes_count_; ++i) {
    qbuf.m.planes[i].bytesused =
        base::checked_cast<__u32>(media::VideoFrame::PlaneAllocationSize(
            frame->format(), i, input_allocated_size_));

    switch (input_memory_type_) {
      case V4L2_MEMORY_USERPTR:
        qbuf.m.planes[i].m.userptr =
            reinterpret_cast<unsigned long>(frame->data(i));
        DCHECK(qbuf.m.planes[i].m.userptr);
        break;

      case V4L2_MEMORY_DMABUF:
        qbuf.m.planes[i].m.fd = frame->dmabuf_fd(i);
        DCHECK_NE(qbuf.m.planes[i].m.fd, -1);
        break;

      default:
        NOTREACHED();
        return false;
    }
  }

  qbuf.memory = input_memory_type_;
  qbuf.length = input_planes_count_;

  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  input_record.at_device = true;
  input_record.frame = frame;
  encoder_input_queue_.pop_front();
  free_input_buffers_.pop_back();
  input_buffer_queued_count_++;
  return true;
}

bool V4L2VideoEncodeAccelerator::EnqueueOutputRecord() {
  DVLOG(3) << "EnqueueOutputRecord()";
  DCHECK(!free_output_buffers_.empty());
  DCHECK(!encoder_output_queue_.empty());

  // Enqueue an output (VIDEO_CAPTURE) buffer.
  linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back();
  const int index = free_output_buffers_.back();
  OutputRecord& output_record = output_buffer_map_[index];
  DCHECK(!output_record.at_device);
  DCHECK(!output_record.buffer_ref.get());
  struct v4l2_buffer qbuf;
  struct v4l2_plane qbuf_planes[1];
  memset(&qbuf, 0, sizeof(qbuf));
  memset(qbuf_planes, 0, sizeof(qbuf_planes));
  qbuf.index = index;
  qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  qbuf.memory = V4L2_MEMORY_MMAP;
  qbuf.m.planes = qbuf_planes;
  qbuf.length = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf);
  output_record.at_device = true;
  output_record.buffer_ref = output_buffer;
  encoder_output_queue_.pop_back();
  free_output_buffers_.pop_back();
  output_buffer_queued_count_++;
  return true;
}

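// DevicePollTask() blocks in device_->Poll() on |device_poll_thread_| and
// posts ServiceDeviceTask() back to |encoder_thread_| when the device or the
// interrupt fd becomes ready; ServiceDeviceTask() then re-arms the poll.
// Encoder state is only ever touched on |encoder_thread_|.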
bool V4L2VideoEncodeAccelerator::StartDevicePoll() {
  DVLOG(3) << "StartDevicePoll()";
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!device_poll_thread_.IsRunning());

  // Start up the device poll thread and schedule its first DevicePollTask().
  if (!device_poll_thread_.Start()) {
    DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start";
    NOTIFY_ERROR(kPlatformFailureError);
    return false;
  }
  // Enqueue a poll task with no devices to poll on -- it will wait only on the
  // interrupt fd.
  device_poll_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask,
                 base::Unretained(this),
                 false));

  return true;
}

bool V4L2VideoEncodeAccelerator::StopDevicePoll() {
  DVLOG(3) << "StopDevicePoll()";

  // Signal the DevicePollTask() to stop, and stop the device poll thread.
  if (!device_->SetDevicePollInterrupt())
    return false;
  device_poll_thread_.Stop();
  // Clear the interrupt now, to be sure.
  if (!device_->ClearDevicePollInterrupt())
    return false;

  if (input_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  input_streamon_ = false;

  if (output_streamon_) {
    __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type);
  }
  output_streamon_ = false;

  // Reset all our accounting info.
  encoder_input_queue_.clear();
  free_input_buffers_.clear();
  for (size_t i = 0; i < input_buffer_map_.size(); ++i) {
    InputRecord& input_record = input_buffer_map_[i];
    input_record.at_device = false;
    input_record.frame = NULL;
    free_input_buffers_.push_back(i);
  }
  input_buffer_queued_count_ = 0;

  free_output_buffers_.clear();
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    OutputRecord& output_record = output_buffer_map_[i];
    output_record.at_device = false;
    output_record.buffer_ref.reset();
    free_output_buffers_.push_back(i);
  }
  output_buffer_queued_count_ = 0;

  encoder_output_queue_.clear();

  DVLOG(3) << "StopDevicePoll(): device poll stopped";
  return true;
}

void V4L2VideoEncodeAccelerator::DevicePollTask(bool poll_device) {
  DVLOG(3) << "DevicePollTask()";
  DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current());

  bool event_pending;
  if (!device_->Poll(poll_device, &event_pending)) {
    NOTIFY_ERROR(kPlatformFailureError);
    return;
  }

  // All processing should happen on ServiceDeviceTask(), since we shouldn't
  // touch encoder state from this thread.
  encoder_thread_.message_loop()->PostTask(
      FROM_HERE,
      base::Bind(&V4L2VideoEncodeAccelerator::ServiceDeviceTask,
                 base::Unretained(this)));
}

void V4L2VideoEncodeAccelerator::NotifyError(Error error) {
  DVLOG(1) << "NotifyError(): error=" << error;

  if (!child_message_loop_proxy_->BelongsToCurrentThread()) {
    child_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(
            &V4L2VideoEncodeAccelerator::NotifyError, weak_this_, error));
    return;
  }

  if (client_) {
    client_->NotifyError(error);
    client_ptr_factory_.reset();
  }
}

void V4L2VideoEncodeAccelerator::SetEncoderState(State state) {
  DVLOG(3) << "SetEncoderState(): state=" << state;

  // We can touch encoder_state_ only if this is the encoder thread or the
  // encoder thread isn't running.
  if (encoder_thread_.message_loop() != NULL &&
      encoder_thread_.message_loop() != base::MessageLoop::current()) {
    encoder_thread_.message_loop()->PostTask(
        FROM_HERE,
        base::Bind(&V4L2VideoEncodeAccelerator::SetEncoderState,
                   base::Unretained(this),
                   state));
  } else {
    encoder_state_ = state;
  }
}

void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask(
    uint32 bitrate,
    uint32 framerate) {
  DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());

  if (bitrate < 1)
    bitrate = 1;
  if (framerate < 1)
    framerate = 1;

  struct v4l2_ext_control ctrls[1];
  struct v4l2_ext_controls control;
  memset(&ctrls, 0, sizeof(ctrls));
  memset(&control, 0, sizeof(control));
  ctrls[0].id = V4L2_CID_MPEG_VIDEO_BITRATE;
  ctrls[0].value = bitrate;
  control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  control.count = arraysize(ctrls);
  control.controls = ctrls;
  IOCTL_OR_ERROR_RETURN(VIDIOC_S_EXT_CTRLS, &control);

  struct v4l2_streamparm parms;
  memset(&parms, 0, sizeof(parms));
  parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // Note that we are provided "frames per second" but V4L2 expects "time per
  // frame"; hence we provide the reciprocal of the framerate here.
  parms.parm.output.timeperframe.numerator = 1;
  parms.parm.output.timeperframe.denominator = framerate;
  IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms);
}

bool V4L2VideoEncodeAccelerator::SetOutputFormat(
    media::VideoCodecProfile output_profile) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  output_format_fourcc_ =
      V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile);
  if (!output_format_fourcc_) {
    DLOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile;
    return false;
  }

  output_buffer_byte_size_ = kOutputBufferSize;

  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = output_format_fourcc_;
  format.fmt.pix_mp.plane_fmt[0].sizeimage =
      base::checked_cast<__u32>(output_buffer_byte_size_);
  format.fmt.pix_mp.num_planes = 1;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);

  // Device might have adjusted the required output size.
  size_t adjusted_output_buffer_size =
      base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage);
  DCHECK_GE(adjusted_output_buffer_size, output_buffer_byte_size_);
  output_buffer_byte_size_ = adjusted_output_buffer_size;

  return true;
}

bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
    media::VideoFrame::Format input_format) {
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  device_input_format_ = media::VideoFrame::UNKNOWN;
  input_planes_count_ = 0;

  uint32 input_format_fourcc =
      V4L2Device::VideoFrameFormatToV4L2PixFmt(input_format);
  if (!input_format_fourcc) {
    DVLOG(1) << "Unsupported input format";
    return false;
  }

  size_t input_planes_count = media::VideoFrame::NumPlanes(input_format);
  DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

  // First see if the device can use the provided input_format directly.
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = input_format_fourcc;
  format.fmt.pix_mp.num_planes = input_planes_count;
  if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
    // Error or format unsupported by device, try to negotiate a fallback.
    input_format_fourcc = device_->PreferredInputFormat();
    input_format =
        V4L2Device::V4L2PixFmtToVideoFrameFormat(input_format_fourcc);
    if (input_format == media::VideoFrame::UNKNOWN)
      return false;

    input_planes_count = media::VideoFrame::NumPlanes(input_format);
    DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

    // Device might have adjusted parameters, reset them along with the format.
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.width = visible_size_.width();
    format.fmt.pix_mp.height = visible_size_.height();
    format.fmt.pix_mp.pixelformat = input_format_fourcc;
    format.fmt.pix_mp.num_planes = input_planes_count;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
    DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count);
  }

  // Take device-adjusted sizes for allocated size.
  input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format);
  DCHECK(gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_)));

  device_input_format_ = input_format;
  input_planes_count_ = input_planes_count;
  return true;
}

bool V4L2VideoEncodeAccelerator::SetFormats(
    media::VideoFrame::Format input_format,
    media::VideoCodecProfile output_profile) {
  DVLOG(3) << "SetFormats()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  if (!SetOutputFormat(output_profile))
    return false;

  if (!NegotiateInputFormat(input_format))
    return false;

  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = visible_size_.width();
  crop.c.height = visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);

  return true;
}

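// InitControls() configures the codec before encoding starts.  Controls in
// the V4L2_CID_MPEG_MFC51_* range are driver-specific (Samsung MFC) rather
// than standard V4L2 controls; the rest come from the standard
// V4L2_CID_MPEG_VIDEO_* set.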
bool V4L2VideoEncodeAccelerator::InitControls() {
  struct v4l2_ext_control ctrls[9];
  struct v4l2_ext_controls control;
  memset(&ctrls, 0, sizeof(ctrls));
  memset(&control, 0, sizeof(control));
  // No B-frames, for lowest decoding latency.
  ctrls[0].id = V4L2_CID_MPEG_VIDEO_B_FRAMES;
  ctrls[0].value = 0;
  // Enable frame-level bitrate control.
  ctrls[1].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE;
  ctrls[1].value = 1;
  // Enable "tight" bitrate mode. For this to work properly, frame- and
  // mb-level bitrate controls have to be enabled as well.
  ctrls[2].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF;
  ctrls[2].value = 1;
  // Force bitrate control to average over a GOP (for tight bitrate
  // tolerance).
  ctrls[3].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT;
  ctrls[3].value = 1;
  // Quantization parameter maximum value (for variable bitrate control).
  ctrls[4].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP;
  ctrls[4].value = 51;
  // Separate stream header so we can cache it and insert into the stream.
  ctrls[5].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE;
  ctrls[5].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE;
  // Enable macroblock-level bitrate control.
  ctrls[6].id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE;
  ctrls[6].value = 1;
  // Use H.264 level 4.0 to match the supported max resolution.
  ctrls[7].id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
  ctrls[7].value = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
  // Disable periodic key frames.
  ctrls[8].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
  ctrls[8].value = 0;
  control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  control.count = arraysize(ctrls);
  control.controls = ctrls;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, &control);

  return true;
}

bool V4L2VideoEncodeAccelerator::CreateInputBuffers() {
  DVLOG(3) << "CreateInputBuffers()";
  // This function runs on encoder_thread_ after output buffers have been
  // provided by the client.
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  // The driver will adjust this to the appropriate number of buffers.
  reqbufs.count = 1;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // TODO(posciak): Once we start doing zero-copy, we should decide based on
  // the current pipeline setup which memory type to use. This should probably
  // be decided based on an argument to Initialize().
  if (image_processor_.get())
    input_memory_type_ = V4L2_MEMORY_DMABUF;
  else
    input_memory_type_ = V4L2_MEMORY_USERPTR;

  reqbufs.memory = input_memory_type_;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(input_buffer_map_.empty());
  input_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < input_buffer_map_.size(); ++i)
    free_input_buffers_.push_back(i);

  return true;
}

bool V4L2VideoEncodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << "CreateOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kOutputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    struct v4l2_plane planes[1];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length = arraysize(planes);
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL,
                                  buffer.m.planes[0].length,
                                  PROT_READ | PROT_WRITE,
                                  MAP_SHARED,
                                  buffer.m.planes[0].m.mem_offset);
    if (address == MAP_FAILED) {
      DPLOG(ERROR) << "CreateOutputBuffers(): mmap() failed";
      return false;
    }
    output_buffer_map_[i].address = address;
    output_buffer_map_[i].length = buffer.m.planes[0].length;
    free_output_buffers_.push_back(i);
  }

  return true;
}

void V4L2VideoEncodeAccelerator::DestroyInputBuffers() {
  DVLOG(3) << "DestroyInputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = input_memory_type_;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  input_buffer_map_.clear();
  free_input_buffers_.clear();
}

void V4L2VideoEncodeAccelerator::DestroyOutputBuffers() {
  DVLOG(3) << "DestroyOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    if (output_buffer_map_[i].address != NULL)
      device_->Munmap(output_buffer_map_[i].address,
                      output_buffer_map_[i].length);
  }

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  output_buffer_map_.clear();
  free_output_buffers_.clear();
}

}  // namespace content