// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <fcntl.h>
#include <linux/videodev2.h>
#include <poll.h>
#include <sys/eventfd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

#include "base/callback.h"
#include "base/command_line.h"
#include "base/debug/trace_event.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/numerics/safe_conversions.h"
#include "content/common/gpu/media/v4l2_video_encode_accelerator.h"
#include "content/public/common/content_switches.h"
#include "media/base/bitstream_buffer.h"

// Puts the encoder into the error state and reports |x| to the client via
// NotifyError().
#define NOTIFY_ERROR(x) \
  do { \
    SetEncoderState(kError); \
    DLOG(ERROR) << "calling NotifyError(): " << x; \
    NotifyError(x); \
  } while (0)

// Issues ioctl |type| on device_; on failure logs, notifies the client of a
// platform error and returns |value| from the calling function.
#define IOCTL_OR_ERROR_RETURN_VALUE(type, arg, value) \
  do { \
    if (device_->Ioctl(type, arg) != 0) { \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
      NOTIFY_ERROR(kPlatformFailureError); \
      return value; \
    } \
  } while (0)

// Variant for void-returning functions.
#define IOCTL_OR_ERROR_RETURN(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, ((void)0))

// Variant for bool-returning functions.
#define IOCTL_OR_ERROR_RETURN_FALSE(type, arg) \
  IOCTL_OR_ERROR_RETURN_VALUE(type, arg, false)

// Issues ioctl |type| and only logs on failure; does not change encoder state.
#define IOCTL_OR_LOG_ERROR(type, arg) \
  do { \
    if (device_->Ioctl(type, arg) != 0) \
      DPLOG(ERROR) << __func__ << "(): ioctl() failed: " << #type; \
  } while (0)

namespace content {

// A client-provided output bitstream buffer: its client-side id, the mapped
// shared memory backing it, and its size in bytes. See
// UseOutputBitstreamBuffer() for where these are created.
struct V4L2VideoEncodeAccelerator::BitstreamBufferRef {
  BitstreamBufferRef(int32 id, scoped_ptr<base::SharedMemory> shm, size_t size)
      : id(id), shm(shm.Pass()), size(size) {}
  const int32 id;
  const scoped_ptr<base::SharedMemory> shm;
  const size_t size;
};

// Input (raw frame) buffer bookkeeping; |at_device| is true while the buffer
// is queued to the V4L2 device.
V4L2VideoEncodeAccelerator::InputRecord::InputRecord() : at_device(false) {
}

// Output (encoded stream) buffer bookkeeping; |address|/|length| describe the
// mmap()ed device buffer.
V4L2VideoEncodeAccelerator::OutputRecord::OutputRecord()
    : at_device(false), address(NULL), length(0) {
}

66V4L2VideoEncodeAccelerator::V4L2VideoEncodeAccelerator( 67 scoped_ptr<V4L2Device> device) 68 : child_message_loop_proxy_(base::MessageLoopProxy::current()), 69 output_buffer_byte_size_(0), 70 device_input_format_(media::VideoFrame::UNKNOWN), 71 input_planes_count_(0), 72 output_format_fourcc_(0), 73 encoder_state_(kUninitialized), 74 stream_header_size_(0), 75 device_(device.Pass()), 76 input_streamon_(false), 77 input_buffer_queued_count_(0), 78 input_memory_type_(V4L2_MEMORY_USERPTR), 79 output_streamon_(false), 80 output_buffer_queued_count_(0), 81 encoder_thread_("V4L2EncoderThread"), 82 device_poll_thread_("V4L2EncoderDevicePollThread"), 83 weak_this_ptr_factory_(this) { 84 weak_this_ = weak_this_ptr_factory_.GetWeakPtr(); 85} 86 87V4L2VideoEncodeAccelerator::~V4L2VideoEncodeAccelerator() { 88 DCHECK(!encoder_thread_.IsRunning()); 89 DCHECK(!device_poll_thread_.IsRunning()); 90 DVLOG(4) << __func__; 91 92 DestroyInputBuffers(); 93 DestroyOutputBuffers(); 94} 95 96bool V4L2VideoEncodeAccelerator::Initialize( 97 media::VideoFrame::Format input_format, 98 const gfx::Size& input_visible_size, 99 media::VideoCodecProfile output_profile, 100 uint32 initial_bitrate, 101 Client* client) { 102 DVLOG(3) << __func__ << ": input_format=" 103 << media::VideoFrame::FormatToString(input_format) 104 << ", input_visible_size=" << input_visible_size.ToString() 105 << ", output_profile=" << output_profile 106 << ", initial_bitrate=" << initial_bitrate; 107 108 visible_size_ = input_visible_size; 109 110 client_ptr_factory_.reset(new base::WeakPtrFactory<Client>(client)); 111 client_ = client_ptr_factory_->GetWeakPtr(); 112 113 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 114 DCHECK_EQ(encoder_state_, kUninitialized); 115 116 struct v4l2_capability caps; 117 memset(&caps, 0, sizeof(caps)); 118 const __u32 kCapsRequired = V4L2_CAP_VIDEO_CAPTURE_MPLANE | 119 V4L2_CAP_VIDEO_OUTPUT_MPLANE | V4L2_CAP_STREAMING; 120 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYCAP, &caps); 
121 if ((caps.capabilities & kCapsRequired) != kCapsRequired) { 122 DLOG(ERROR) << "Initialize(): ioctl() failed: VIDIOC_QUERYCAP: " 123 "caps check failed: 0x" << std::hex << caps.capabilities; 124 return false; 125 } 126 127 if (!SetFormats(input_format, output_profile)) { 128 DLOG(ERROR) << "Failed setting up formats"; 129 return false; 130 } 131 132 if (input_format != device_input_format_) { 133 DVLOG(1) << "Input format not supported by the HW, will convert to " 134 << media::VideoFrame::FormatToString(device_input_format_); 135 136 scoped_ptr<V4L2Device> device = 137 V4L2Device::Create(V4L2Device::kImageProcessor); 138 image_processor_.reset(new V4L2ImageProcessor(device.Pass())); 139 140 // Convert from input_format to device_input_format_, keeping the size 141 // at visible_size_ and requiring the output buffers to be of at least 142 // input_allocated_size_. 143 if (!image_processor_->Initialize( 144 input_format, 145 device_input_format_, 146 visible_size_, 147 visible_size_, 148 input_allocated_size_, 149 base::Bind(&V4L2VideoEncodeAccelerator::ImageProcessorError, 150 weak_this_))) { 151 DLOG(ERROR) << "Failed initializing image processor"; 152 return false; 153 } 154 } 155 156 if (!InitControls()) 157 return false; 158 159 if (!CreateOutputBuffers()) 160 return false; 161 162 if (!encoder_thread_.Start()) { 163 DLOG(ERROR) << "Initialize(): encoder thread failed to start"; 164 return false; 165 } 166 167 RequestEncodingParametersChange(initial_bitrate, kInitialFramerate); 168 169 SetEncoderState(kInitialized); 170 171 child_message_loop_proxy_->PostTask( 172 FROM_HERE, 173 base::Bind(&Client::RequireBitstreamBuffers, 174 client_, 175 kInputBufferCount, 176 image_processor_.get() ? 
177 image_processor_->input_allocated_size() : 178 input_allocated_size_, 179 output_buffer_byte_size_)); 180 return true; 181} 182 183void V4L2VideoEncodeAccelerator::ImageProcessorError() { 184 DVLOG(1) << "Image processor error"; 185 NOTIFY_ERROR(kPlatformFailureError); 186} 187 188void V4L2VideoEncodeAccelerator::Encode( 189 const scoped_refptr<media::VideoFrame>& frame, 190 bool force_keyframe) { 191 DVLOG(3) << "Encode(): force_keyframe=" << force_keyframe; 192 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 193 194 if (image_processor_) { 195 image_processor_->Process( 196 frame, 197 base::Bind(&V4L2VideoEncodeAccelerator::FrameProcessed, 198 weak_this_, 199 force_keyframe)); 200 } else { 201 encoder_thread_.message_loop()->PostTask( 202 FROM_HERE, 203 base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask, 204 base::Unretained(this), 205 frame, 206 force_keyframe)); 207 } 208} 209 210void V4L2VideoEncodeAccelerator::UseOutputBitstreamBuffer( 211 const media::BitstreamBuffer& buffer) { 212 DVLOG(3) << "UseOutputBitstreamBuffer(): id=" << buffer.id(); 213 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 214 215 if (buffer.size() < output_buffer_byte_size_) { 216 NOTIFY_ERROR(kInvalidArgumentError); 217 return; 218 } 219 220 scoped_ptr<base::SharedMemory> shm( 221 new base::SharedMemory(buffer.handle(), false)); 222 if (!shm->Map(buffer.size())) { 223 NOTIFY_ERROR(kPlatformFailureError); 224 return; 225 } 226 227 scoped_ptr<BitstreamBufferRef> buffer_ref( 228 new BitstreamBufferRef(buffer.id(), shm.Pass(), buffer.size())); 229 encoder_thread_.message_loop()->PostTask( 230 FROM_HERE, 231 base::Bind(&V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask, 232 base::Unretained(this), 233 base::Passed(&buffer_ref))); 234} 235 236void V4L2VideoEncodeAccelerator::RequestEncodingParametersChange( 237 uint32 bitrate, 238 uint32 framerate) { 239 DVLOG(3) << "RequestEncodingParametersChange(): bitrate=" << bitrate 240 << ", framerate=" << 
framerate; 241 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 242 243 encoder_thread_.message_loop()->PostTask( 244 FROM_HERE, 245 base::Bind( 246 &V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask, 247 base::Unretained(this), 248 bitrate, 249 framerate)); 250} 251 252void V4L2VideoEncodeAccelerator::Destroy() { 253 DVLOG(3) << "Destroy()"; 254 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 255 256 // We're destroying; cancel all callbacks. 257 client_ptr_factory_.reset(); 258 weak_this_ptr_factory_.InvalidateWeakPtrs(); 259 260 if (image_processor_.get()) 261 image_processor_.release()->Destroy(); 262 263 // If the encoder thread is running, destroy using posted task. 264 if (encoder_thread_.IsRunning()) { 265 encoder_thread_.message_loop()->PostTask( 266 FROM_HERE, 267 base::Bind(&V4L2VideoEncodeAccelerator::DestroyTask, 268 base::Unretained(this))); 269 // DestroyTask() will put the encoder into kError state and cause all tasks 270 // to no-op. 271 encoder_thread_.Stop(); 272 } else { 273 // Otherwise, call the destroy task directly. 274 DestroyTask(); 275 } 276 277 // Set to kError state just in case. 
278 SetEncoderState(kError); 279 280 delete this; 281} 282 283std::vector<media::VideoEncodeAccelerator::SupportedProfile> 284V4L2VideoEncodeAccelerator::GetSupportedProfiles() { 285 return GetSupportedProfilesStatic(); 286} 287 288std::vector<media::VideoEncodeAccelerator::SupportedProfile> 289V4L2VideoEncodeAccelerator::GetSupportedProfilesStatic() { 290 std::vector<SupportedProfile> profiles; 291 SupportedProfile profile; 292 293 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess(); 294 if (cmd_line->HasSwitch(switches::kEnableWebRtcHWVp8Encoding)) { 295 profile.profile = media::VP8PROFILE_ANY; 296 profile.max_resolution.SetSize(1920, 1088); 297 profile.max_framerate_numerator = 30; 298 profile.max_framerate_denominator = 1; 299 profiles.push_back(profile); 300 } 301 302 profile.profile = media::H264PROFILE_MAIN; 303 profile.max_resolution.SetSize(1920, 1088); 304 profile.max_framerate_numerator = 30; 305 profile.max_framerate_denominator = 1; 306 profiles.push_back(profile); 307 308 return profiles; 309} 310 311void V4L2VideoEncodeAccelerator::FrameProcessed( 312 bool force_keyframe, 313 const scoped_refptr<media::VideoFrame>& frame) { 314 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 315 DVLOG(3) << "FrameProcessed(): force_keyframe=" << force_keyframe; 316 317 encoder_thread_.message_loop()->PostTask( 318 FROM_HERE, 319 base::Bind(&V4L2VideoEncodeAccelerator::EncodeTask, 320 base::Unretained(this), 321 frame, 322 force_keyframe)); 323} 324 325void V4L2VideoEncodeAccelerator::EncodeTask( 326 const scoped_refptr<media::VideoFrame>& frame, 327 bool force_keyframe) { 328 DVLOG(3) << "EncodeTask(): force_keyframe=" << force_keyframe; 329 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); 330 DCHECK_NE(encoder_state_, kUninitialized); 331 332 if (encoder_state_ == kError) { 333 DVLOG(2) << "EncodeTask(): early out: kError state"; 334 return; 335 } 336 337 encoder_input_queue_.push_back(frame); 338 
Enqueue(); 339 340 if (force_keyframe) { 341 // TODO(posciak): this presently makes for slightly imprecise encoding 342 // parameters updates. To precisely align the parameter updates with the 343 // incoming input frame, we should queue the parameters together with the 344 // frame onto encoder_input_queue_ and apply them when the input is about 345 // to be queued to the codec. 346 struct v4l2_ext_control ctrls[1]; 347 struct v4l2_ext_controls control; 348 memset(&ctrls, 0, sizeof(ctrls)); 349 memset(&control, 0, sizeof(control)); 350 ctrls[0].id = V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE; 351 ctrls[0].value = V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME; 352 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; 353 control.count = 1; 354 control.controls = ctrls; 355 IOCTL_OR_ERROR_RETURN(VIDIOC_S_EXT_CTRLS, &control); 356 } 357} 358 359void V4L2VideoEncodeAccelerator::UseOutputBitstreamBufferTask( 360 scoped_ptr<BitstreamBufferRef> buffer_ref) { 361 DVLOG(3) << "UseOutputBitstreamBufferTask(): id=" << buffer_ref->id; 362 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); 363 364 encoder_output_queue_.push_back( 365 linked_ptr<BitstreamBufferRef>(buffer_ref.release())); 366 Enqueue(); 367 368 if (encoder_state_ == kInitialized) { 369 // Finish setting up our OUTPUT queue. See: Initialize(). 370 // VIDIOC_REQBUFS on OUTPUT queue. 371 if (!CreateInputBuffers()) 372 return; 373 if (!StartDevicePoll()) 374 return; 375 encoder_state_ = kEncoding; 376 } 377} 378 379void V4L2VideoEncodeAccelerator::DestroyTask() { 380 DVLOG(3) << "DestroyTask()"; 381 382 // DestroyTask() should run regardless of encoder_state_. 383 384 // Stop streaming and the device_poll_thread_. 385 StopDevicePoll(); 386 387 // Set our state to kError, and early-out all tasks. 
388 encoder_state_ = kError; 389} 390 391void V4L2VideoEncodeAccelerator::ServiceDeviceTask() { 392 DVLOG(3) << "ServiceDeviceTask()"; 393 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); 394 DCHECK_NE(encoder_state_, kUninitialized); 395 DCHECK_NE(encoder_state_, kInitialized); 396 397 if (encoder_state_ == kError) { 398 DVLOG(2) << "ServiceDeviceTask(): early out: kError state"; 399 return; 400 } 401 402 Dequeue(); 403 Enqueue(); 404 405 // Clear the interrupt fd. 406 if (!device_->ClearDevicePollInterrupt()) 407 return; 408 409 // Device can be polled as soon as either input or output buffers are queued. 410 bool poll_device = 411 (input_buffer_queued_count_ + output_buffer_queued_count_ > 0); 412 413 // ServiceDeviceTask() should only ever be scheduled from DevicePollTask(), 414 // so either: 415 // * device_poll_thread_ is running normally 416 // * device_poll_thread_ scheduled us, but then a DestroyTask() shut it down, 417 // in which case we're in kError state, and we should have early-outed 418 // already. 419 DCHECK(device_poll_thread_.message_loop()); 420 // Queue the DevicePollTask() now. 
421 device_poll_thread_.message_loop()->PostTask( 422 FROM_HERE, 423 base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask, 424 base::Unretained(this), 425 poll_device)); 426 427 DVLOG(2) << __func__ << ": buffer counts: ENC[" 428 << encoder_input_queue_.size() << "] => DEVICE[" 429 << free_input_buffers_.size() << "+" 430 << input_buffer_queued_count_ << "/" 431 << input_buffer_map_.size() << "->" 432 << free_output_buffers_.size() << "+" 433 << output_buffer_queued_count_ << "/" 434 << output_buffer_map_.size() << "] => OUT[" 435 << encoder_output_queue_.size() << "]"; 436} 437 438void V4L2VideoEncodeAccelerator::Enqueue() { 439 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); 440 441 DVLOG(3) << "Enqueue() " 442 << "free_input_buffers: " << free_input_buffers_.size() 443 << "input_queue: " << encoder_input_queue_.size(); 444 445 // Enqueue all the inputs we can. 446 const int old_inputs_queued = input_buffer_queued_count_; 447 // while (!ready_input_buffers_.empty()) { 448 while (!encoder_input_queue_.empty() && !free_input_buffers_.empty()) { 449 if (!EnqueueInputRecord()) 450 return; 451 } 452 if (old_inputs_queued == 0 && input_buffer_queued_count_ != 0) { 453 // We just started up a previously empty queue. 454 // Queue state changed; signal interrupt. 455 if (!device_->SetDevicePollInterrupt()) 456 return; 457 // Start VIDIOC_STREAMON if we haven't yet. 458 if (!input_streamon_) { 459 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 460 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 461 input_streamon_ = true; 462 } 463 } 464 465 // Enqueue all the outputs we can. 466 const int old_outputs_queued = output_buffer_queued_count_; 467 while (!free_output_buffers_.empty() && !encoder_output_queue_.empty()) { 468 if (!EnqueueOutputRecord()) 469 return; 470 } 471 if (old_outputs_queued == 0 && output_buffer_queued_count_ != 0) { 472 // We just started up a previously empty queue. 473 // Queue state changed; signal interrupt. 
474 if (!device_->SetDevicePollInterrupt()) 475 return; 476 // Start VIDIOC_STREAMON if we haven't yet. 477 if (!output_streamon_) { 478 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 479 IOCTL_OR_ERROR_RETURN(VIDIOC_STREAMON, &type); 480 output_streamon_ = true; 481 } 482 } 483} 484 485void V4L2VideoEncodeAccelerator::Dequeue() { 486 DVLOG(3) << "Dequeue()"; 487 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); 488 489 // Dequeue completed input (VIDEO_OUTPUT) buffers, and recycle to the free 490 // list. 491 struct v4l2_buffer dqbuf; 492 struct v4l2_plane planes[VIDEO_MAX_PLANES]; 493 while (input_buffer_queued_count_ > 0) { 494 DVLOG(4) << "inputs queued: " << input_buffer_queued_count_; 495 DCHECK(input_streamon_); 496 memset(&dqbuf, 0, sizeof(dqbuf)); 497 memset(&planes, 0, sizeof(planes)); 498 dqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 499 dqbuf.memory = V4L2_MEMORY_MMAP; 500 dqbuf.m.planes = planes; 501 dqbuf.length = input_planes_count_; 502 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 503 if (errno == EAGAIN) { 504 // EAGAIN if we're just out of buffers to dequeue. 505 break; 506 } 507 DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF"; 508 NOTIFY_ERROR(kPlatformFailureError); 509 return; 510 } 511 InputRecord& input_record = input_buffer_map_[dqbuf.index]; 512 DCHECK(input_record.at_device); 513 input_record.at_device = false; 514 515 input_record.frame = NULL; 516 free_input_buffers_.push_back(dqbuf.index); 517 input_buffer_queued_count_--; 518 } 519 520 // Dequeue completed output (VIDEO_CAPTURE) buffers, and recycle to the 521 // free list. Notify the client that an output buffer is complete. 
522 while (output_buffer_queued_count_ > 0) { 523 DCHECK(output_streamon_); 524 memset(&dqbuf, 0, sizeof(dqbuf)); 525 memset(planes, 0, sizeof(planes)); 526 dqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 527 dqbuf.memory = V4L2_MEMORY_MMAP; 528 dqbuf.m.planes = planes; 529 dqbuf.length = 1; 530 if (device_->Ioctl(VIDIOC_DQBUF, &dqbuf) != 0) { 531 if (errno == EAGAIN) { 532 // EAGAIN if we're just out of buffers to dequeue. 533 break; 534 } 535 DPLOG(ERROR) << "Dequeue(): ioctl() failed: VIDIOC_DQBUF"; 536 NOTIFY_ERROR(kPlatformFailureError); 537 return; 538 } 539 const bool key_frame = ((dqbuf.flags & V4L2_BUF_FLAG_KEYFRAME) != 0); 540 OutputRecord& output_record = output_buffer_map_[dqbuf.index]; 541 DCHECK(output_record.at_device); 542 DCHECK(output_record.buffer_ref.get()); 543 544 void* output_data = output_record.address; 545 size_t output_size = dqbuf.m.planes[0].bytesused; 546 // This shouldn't happen, but just in case. We should be able to recover 547 // after next keyframe after showing some corruption. 548 DCHECK_LE(output_size, output_buffer_byte_size_); 549 if (output_size > output_buffer_byte_size_) 550 output_size = output_buffer_byte_size_; 551 uint8* target_data = 552 reinterpret_cast<uint8*>(output_record.buffer_ref->shm->memory()); 553 if (output_format_fourcc_ == V4L2_PIX_FMT_H264) { 554 if (stream_header_size_ == 0) { 555 // Assume that the first buffer dequeued is the stream header. 556 stream_header_size_ = output_size; 557 stream_header_.reset(new uint8[stream_header_size_]); 558 memcpy(stream_header_.get(), output_data, stream_header_size_); 559 } 560 if (key_frame && 561 output_buffer_byte_size_ - stream_header_size_ >= output_size) { 562 // Insert stream header before every keyframe. 
563 memcpy(target_data, stream_header_.get(), stream_header_size_); 564 memcpy(target_data + stream_header_size_, output_data, output_size); 565 output_size += stream_header_size_; 566 } else { 567 memcpy(target_data, output_data, output_size); 568 } 569 } else { 570 memcpy(target_data, output_data, output_size); 571 } 572 573 DVLOG(3) << "Dequeue(): returning " 574 "bitstream_buffer_id=" << output_record.buffer_ref->id 575 << ", size=" << output_size << ", key_frame=" << key_frame; 576 child_message_loop_proxy_->PostTask( 577 FROM_HERE, 578 base::Bind(&Client::BitstreamBufferReady, 579 client_, 580 output_record.buffer_ref->id, 581 output_size, 582 key_frame)); 583 output_record.at_device = false; 584 output_record.buffer_ref.reset(); 585 free_output_buffers_.push_back(dqbuf.index); 586 output_buffer_queued_count_--; 587 } 588} 589 590bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() { 591 DVLOG(3) << "EnqueueInputRecord()"; 592 DCHECK(!free_input_buffers_.empty()); 593 DCHECK(!encoder_input_queue_.empty()); 594 595 // Enqueue an input (VIDEO_OUTPUT) buffer. 
596 scoped_refptr<media::VideoFrame> frame = encoder_input_queue_.front(); 597 const int index = free_input_buffers_.back(); 598 InputRecord& input_record = input_buffer_map_[index]; 599 DCHECK(!input_record.at_device); 600 struct v4l2_buffer qbuf; 601 struct v4l2_plane qbuf_planes[VIDEO_MAX_PLANES]; 602 memset(&qbuf, 0, sizeof(qbuf)); 603 memset(qbuf_planes, 0, sizeof(qbuf_planes)); 604 qbuf.index = index; 605 qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 606 qbuf.m.planes = qbuf_planes; 607 608 DCHECK_EQ(device_input_format_, frame->format()); 609 for (size_t i = 0; i < input_planes_count_; ++i) { 610 qbuf.m.planes[i].bytesused = 611 base::checked_cast<__u32>(media::VideoFrame::PlaneAllocationSize( 612 frame->format(), i, input_allocated_size_)); 613 614 switch (input_memory_type_) { 615 case V4L2_MEMORY_USERPTR: 616 qbuf.m.planes[i].length = qbuf.m.planes[i].bytesused; 617 qbuf.m.planes[i].m.userptr = 618 reinterpret_cast<unsigned long>(frame->data(i)); 619 DCHECK(qbuf.m.planes[i].m.userptr); 620 break; 621 622 case V4L2_MEMORY_DMABUF: 623 qbuf.m.planes[i].m.fd = frame->dmabuf_fd(i); 624 DCHECK_NE(qbuf.m.planes[i].m.fd, -1); 625 break; 626 627 default: 628 NOTREACHED(); 629 return false; 630 } 631 } 632 633 qbuf.memory = input_memory_type_; 634 qbuf.length = input_planes_count_; 635 636 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 637 input_record.at_device = true; 638 input_record.frame = frame; 639 encoder_input_queue_.pop_front(); 640 free_input_buffers_.pop_back(); 641 input_buffer_queued_count_++; 642 return true; 643} 644 645bool V4L2VideoEncodeAccelerator::EnqueueOutputRecord() { 646 DVLOG(3) << "EnqueueOutputRecord()"; 647 DCHECK(!free_output_buffers_.empty()); 648 DCHECK(!encoder_output_queue_.empty()); 649 650 // Enqueue an output (VIDEO_CAPTURE) buffer. 
651 linked_ptr<BitstreamBufferRef> output_buffer = encoder_output_queue_.back(); 652 const int index = free_output_buffers_.back(); 653 OutputRecord& output_record = output_buffer_map_[index]; 654 DCHECK(!output_record.at_device); 655 DCHECK(!output_record.buffer_ref.get()); 656 struct v4l2_buffer qbuf; 657 struct v4l2_plane qbuf_planes[1]; 658 memset(&qbuf, 0, sizeof(qbuf)); 659 memset(qbuf_planes, 0, sizeof(qbuf_planes)); 660 qbuf.index = index; 661 qbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 662 qbuf.memory = V4L2_MEMORY_MMAP; 663 qbuf.m.planes = qbuf_planes; 664 qbuf.length = 1; 665 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QBUF, &qbuf); 666 output_record.at_device = true; 667 output_record.buffer_ref = output_buffer; 668 encoder_output_queue_.pop_back(); 669 free_output_buffers_.pop_back(); 670 output_buffer_queued_count_++; 671 return true; 672} 673 674bool V4L2VideoEncodeAccelerator::StartDevicePoll() { 675 DVLOG(3) << "StartDevicePoll()"; 676 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); 677 DCHECK(!device_poll_thread_.IsRunning()); 678 679 // Start up the device poll thread and schedule its first DevicePollTask(). 680 if (!device_poll_thread_.Start()) { 681 DLOG(ERROR) << "StartDevicePoll(): Device thread failed to start"; 682 NOTIFY_ERROR(kPlatformFailureError); 683 return false; 684 } 685 // Enqueue a poll task with no devices to poll on -- it will wait only on the 686 // interrupt fd. 687 device_poll_thread_.message_loop()->PostTask( 688 FROM_HERE, 689 base::Bind(&V4L2VideoEncodeAccelerator::DevicePollTask, 690 base::Unretained(this), 691 false)); 692 693 return true; 694} 695 696bool V4L2VideoEncodeAccelerator::StopDevicePoll() { 697 DVLOG(3) << "StopDevicePoll()"; 698 699 // Signal the DevicePollTask() to stop, and stop the device poll thread. 700 if (!device_->SetDevicePollInterrupt()) 701 return false; 702 device_poll_thread_.Stop(); 703 // Clear the interrupt now, to be sure. 
704 if (!device_->ClearDevicePollInterrupt()) 705 return false; 706 707 if (input_streamon_) { 708 __u32 type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 709 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); 710 } 711 input_streamon_ = false; 712 713 if (output_streamon_) { 714 __u32 type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 715 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_STREAMOFF, &type); 716 } 717 output_streamon_ = false; 718 719 // Reset all our accounting info. 720 encoder_input_queue_.clear(); 721 free_input_buffers_.clear(); 722 for (size_t i = 0; i < input_buffer_map_.size(); ++i) { 723 InputRecord& input_record = input_buffer_map_[i]; 724 input_record.at_device = false; 725 input_record.frame = NULL; 726 free_input_buffers_.push_back(i); 727 } 728 input_buffer_queued_count_ = 0; 729 730 free_output_buffers_.clear(); 731 for (size_t i = 0; i < output_buffer_map_.size(); ++i) { 732 OutputRecord& output_record = output_buffer_map_[i]; 733 output_record.at_device = false; 734 output_record.buffer_ref.reset(); 735 free_output_buffers_.push_back(i); 736 } 737 output_buffer_queued_count_ = 0; 738 739 encoder_output_queue_.clear(); 740 741 DVLOG(3) << "StopDevicePoll(): device poll stopped"; 742 return true; 743} 744 745void V4L2VideoEncodeAccelerator::DevicePollTask(bool poll_device) { 746 DVLOG(3) << "DevicePollTask()"; 747 DCHECK_EQ(device_poll_thread_.message_loop(), base::MessageLoop::current()); 748 749 bool event_pending; 750 if (!device_->Poll(poll_device, &event_pending)) { 751 NOTIFY_ERROR(kPlatformFailureError); 752 return; 753 } 754 755 // All processing should happen on ServiceDeviceTask(), since we shouldn't 756 // touch encoder state from this thread. 
757 encoder_thread_.message_loop()->PostTask( 758 FROM_HERE, 759 base::Bind(&V4L2VideoEncodeAccelerator::ServiceDeviceTask, 760 base::Unretained(this))); 761} 762 763void V4L2VideoEncodeAccelerator::NotifyError(Error error) { 764 DVLOG(1) << "NotifyError(): error=" << error; 765 766 if (!child_message_loop_proxy_->BelongsToCurrentThread()) { 767 child_message_loop_proxy_->PostTask( 768 FROM_HERE, 769 base::Bind( 770 &V4L2VideoEncodeAccelerator::NotifyError, weak_this_, error)); 771 return; 772 } 773 774 if (client_) { 775 client_->NotifyError(error); 776 client_ptr_factory_.reset(); 777 } 778} 779 780void V4L2VideoEncodeAccelerator::SetEncoderState(State state) { 781 DVLOG(3) << "SetEncoderState(): state=" << state; 782 783 // We can touch encoder_state_ only if this is the encoder thread or the 784 // encoder thread isn't running. 785 if (encoder_thread_.message_loop() != NULL && 786 encoder_thread_.message_loop() != base::MessageLoop::current()) { 787 encoder_thread_.message_loop()->PostTask( 788 FROM_HERE, 789 base::Bind(&V4L2VideoEncodeAccelerator::SetEncoderState, 790 base::Unretained(this), 791 state)); 792 } else { 793 encoder_state_ = state; 794 } 795} 796 797void V4L2VideoEncodeAccelerator::RequestEncodingParametersChangeTask( 798 uint32 bitrate, 799 uint32 framerate) { 800 DVLOG(3) << "RequestEncodingParametersChangeTask(): bitrate=" << bitrate 801 << ", framerate=" << framerate; 802 DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current()); 803 804 if (bitrate < 1) 805 bitrate = 1; 806 if (framerate < 1) 807 framerate = 1; 808 809 struct v4l2_ext_control ctrls[1]; 810 struct v4l2_ext_controls control; 811 memset(&ctrls, 0, sizeof(ctrls)); 812 memset(&control, 0, sizeof(control)); 813 ctrls[0].id = V4L2_CID_MPEG_VIDEO_BITRATE; 814 ctrls[0].value = bitrate; 815 control.ctrl_class = V4L2_CTRL_CLASS_MPEG; 816 control.count = arraysize(ctrls); 817 control.controls = ctrls; 818 IOCTL_OR_ERROR_RETURN(VIDIOC_S_EXT_CTRLS, &control); 819 820 struct 
v4l2_streamparm parms; 821 memset(&parms, 0, sizeof(parms)); 822 parms.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; 823 // Note that we are provided "frames per second" but V4L2 expects "time per 824 // frame"; hence we provide the reciprocal of the framerate here. 825 parms.parm.output.timeperframe.numerator = 1; 826 parms.parm.output.timeperframe.denominator = framerate; 827 IOCTL_OR_ERROR_RETURN(VIDIOC_S_PARM, &parms); 828} 829 830bool V4L2VideoEncodeAccelerator::SetOutputFormat( 831 media::VideoCodecProfile output_profile) { 832 DCHECK(child_message_loop_proxy_->BelongsToCurrentThread()); 833 DCHECK(!input_streamon_); 834 DCHECK(!output_streamon_); 835 836 output_format_fourcc_ = 837 V4L2Device::VideoCodecProfileToV4L2PixFmt(output_profile); 838 if (!output_format_fourcc_) { 839 DLOG(ERROR) << "Initialize(): invalid output_profile=" << output_profile; 840 return false; 841 } 842 843 output_buffer_byte_size_ = kOutputBufferSize; 844 845 struct v4l2_format format; 846 memset(&format, 0, sizeof(format)); 847 format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; 848 format.fmt.pix_mp.width = visible_size_.width(); 849 format.fmt.pix_mp.height = visible_size_.height(); 850 format.fmt.pix_mp.pixelformat = output_format_fourcc_; 851 format.fmt.pix_mp.plane_fmt[0].sizeimage = 852 base::checked_cast<__u32>(output_buffer_byte_size_); 853 format.fmt.pix_mp.num_planes = 1; 854 IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format); 855 856 // Device might have adjusted the required output size. 
  // The device may have adjusted (increased) the required output buffer size;
  // adopt the value it reported so allocated bitstream buffers are big enough.
  size_t adjusted_output_buffer_size =
      base::checked_cast<size_t>(format.fmt.pix_mp.plane_fmt[0].sizeimage);
  DCHECK_GE(adjusted_output_buffer_size, output_buffer_byte_size_);
  output_buffer_byte_size_ = adjusted_output_buffer_size;

  return true;
}

// Negotiates the raw input frame format with the device. Tries the
// client-provided |input_format| first; if the device rejects it, falls back
// to the device's preferred input format. On success, stores the negotiated
// format, plane count and device-adjusted coded size in member state.
// Returns false if no usable format could be agreed on.
bool V4L2VideoEncodeAccelerator::NegotiateInputFormat(
    media::VideoFrame::Format input_format) {
  DVLOG(3) << "NegotiateInputFormat()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  // Formats may not be changed while streaming is on.
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  // Reset negotiated state; it is only filled in again on success.
  device_input_format_ = media::VideoFrame::UNKNOWN;
  input_planes_count_ = 0;

  uint32 input_format_fourcc =
      V4L2Device::VideoFrameFormatToV4L2PixFmt(input_format);
  if (!input_format_fourcc) {
    DVLOG(1) << "Unsupported input format";
    return false;
  }

  size_t input_planes_count = media::VideoFrame::NumPlanes(input_format);
  DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

  // First see if the device can use the provided input_format directly.
  // Note: on a V4L2 mem-to-mem encoder, the OUTPUT queue carries the raw
  // input frames (and the CAPTURE queue carries the encoded stream).
  struct v4l2_format format;
  memset(&format, 0, sizeof(format));
  format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  format.fmt.pix_mp.width = visible_size_.width();
  format.fmt.pix_mp.height = visible_size_.height();
  format.fmt.pix_mp.pixelformat = input_format_fourcc;
  format.fmt.pix_mp.num_planes = input_planes_count;
  if (device_->Ioctl(VIDIOC_S_FMT, &format) != 0) {
    // Error or format unsupported by device, try to negotiate a fallback.
    input_format_fourcc = device_->PreferredInputFormat();
    input_format =
        V4L2Device::V4L2PixFmtToVideoFrameFormat(input_format_fourcc);
    if (input_format == media::VideoFrame::UNKNOWN)
      return false;

    input_planes_count = media::VideoFrame::NumPlanes(input_format);
    DCHECK_LE(input_planes_count, static_cast<size_t>(VIDEO_MAX_PLANES));

    // Device might have adjusted parameters, reset them along with the format.
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    format.fmt.pix_mp.width = visible_size_.width();
    format.fmt.pix_mp.height = visible_size_.height();
    format.fmt.pix_mp.pixelformat = input_format_fourcc;
    format.fmt.pix_mp.num_planes = input_planes_count;
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_FMT, &format);
    DCHECK_EQ(format.fmt.pix_mp.num_planes, input_planes_count);
  }

  // Take device-adjusted sizes for allocated size.
  input_allocated_size_ = V4L2Device::CodedSizeFromV4L2Format(format);
  DCHECK(gfx::Rect(input_allocated_size_).Contains(gfx::Rect(visible_size_)));

  device_input_format_ = input_format;
  input_planes_count_ = input_planes_count;
  return true;
}

// Configures the output (encoded stream) format, then the input (raw frame)
// format, and finally restricts the crop rectangle to the visible size so the
// encoder does not encode any coded-size alignment padding.
bool V4L2VideoEncodeAccelerator::SetFormats(
    media::VideoFrame::Format input_format,
    media::VideoCodecProfile output_profile) {
  DVLOG(3) << "SetFormats()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);
  DCHECK(!output_streamon_);

  if (!SetOutputFormat(output_profile))
    return false;

  if (!NegotiateInputFormat(input_format))
    return false;

  // Crop away the difference between the visible size and the (possibly
  // larger, device-aligned) coded size.
  struct v4l2_crop crop;
  memset(&crop, 0, sizeof(crop));
  crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT;
  crop.c.left = 0;
  crop.c.top = 0;
  crop.c.width = visible_size_.width();
  crop.c.height = visible_size_.height();
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_CROP, &crop);

  return true;
}

// Sets the initial set of codec controls on the device in a single
// VIDIOC_S_EXT_CTRLS batch: bitrate-control behavior, QP bounds, H.264 level,
// stream header handling, and GOP configuration. Some of these controls are
// driver-specific (MFC51) and may not exist on every device.
bool V4L2VideoEncodeAccelerator::InitControls() {
  struct v4l2_ext_control ctrls[9];
  struct v4l2_ext_controls control;
  memset(&ctrls, 0, sizeof(ctrls));
  memset(&control, 0, sizeof(control));
  // No B-frames, for lowest decoding latency.
  ctrls[0].id = V4L2_CID_MPEG_VIDEO_B_FRAMES;
  ctrls[0].value = 0;
  // Enable frame-level bitrate control.
  ctrls[1].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE;
  ctrls[1].value = 1;
  // Enable "tight" bitrate mode. For this to work properly, frame- and mb-level
  // bitrate controls have to be enabled as well.
  ctrls[2].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF;
  ctrls[2].value = 1;
  // Force bitrate control to average over a GOP (for tight bitrate
  // tolerance).
  ctrls[3].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT;
  ctrls[3].value = 1;
  // Quantization parameter maximum value (for variable bitrate control).
  ctrls[4].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP;
  ctrls[4].value = 51;
  // Separate stream header so we can cache it and insert into the stream.
  ctrls[5].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE;
  ctrls[5].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE;
  // Enable macroblock-level bitrate control.
  ctrls[6].id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE;
  ctrls[6].value = 1;
  // Use H.264 level 4.0 to match the supported max resolution.
  ctrls[7].id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
  ctrls[7].value = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
  // Disable periodic key frames.
  ctrls[8].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE;
  ctrls[8].value = 0;
  control.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  control.count = arraysize(ctrls);
  control.controls = ctrls;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_S_EXT_CTRLS, &control);

  return true;
}

// Requests input (OUTPUT-queue) buffers from the driver and initializes the
// free-buffer bookkeeping. No memory is mapped here: input frames arrive
// either as DMABUF fds (when an image processor is in the pipeline) or as
// client-provided user pointers.
bool V4L2VideoEncodeAccelerator::CreateInputBuffers() {
  DVLOG(3) << "CreateInputBuffers()";
  // This function runs on encoder_thread_ after output buffers have been
  // provided by the client.
  DCHECK_EQ(encoder_thread_.message_loop(), base::MessageLoop::current());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  // Driver will modify to the appropriate number of buffers.
  reqbufs.count = 1;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  // TODO(posciak): Once we start doing zero-copy, we should decide based on
  // the current pipeline setup which memory type to use. This should probably
  // be decided based on an argument to Initialize().
  if (image_processor_.get())
    input_memory_type_ = V4L2_MEMORY_DMABUF;
  else
    input_memory_type_ = V4L2_MEMORY_USERPTR;

  reqbufs.memory = input_memory_type_;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(input_buffer_map_.empty());
  // reqbufs.count now holds the driver-chosen buffer count; initially every
  // buffer is free.
  input_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < input_buffer_map_.size(); ++i)
    free_input_buffers_.push_back(i);

  return true;
}

// Requests output (CAPTURE-queue) buffers from the driver and mmaps each
// single-plane buffer into our address space so encoded stream data can be
// copied out to client bitstream buffers. Returns false if mmap fails;
// buffers mapped so far are cleaned up later by DestroyOutputBuffers().
bool V4L2VideoEncodeAccelerator::CreateOutputBuffers() {
  DVLOG(3) << "CreateOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = kOutputBufferCount;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_REQBUFS, &reqbufs);

  DCHECK(output_buffer_map_.empty());
  output_buffer_map_.resize(reqbufs.count);
  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    // Query each buffer's plane offset/length, then map its single plane.
    struct v4l2_plane planes[1];
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(buffer));
    memset(planes, 0, sizeof(planes));
    buffer.index = i;
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.m.planes = planes;
    buffer.length = arraysize(planes);
    IOCTL_OR_ERROR_RETURN_FALSE(VIDIOC_QUERYBUF, &buffer);
    void* address = device_->Mmap(NULL,
                                  buffer.m.planes[0].length,
                                  PROT_READ | PROT_WRITE,
                                  MAP_SHARED,
                                  buffer.m.planes[0].m.mem_offset);
    if (address == MAP_FAILED) {
      DPLOG(ERROR) << "CreateOutputBuffers(): mmap() failed";
      return false;
    }
    output_buffer_map_[i].address = address;
    output_buffer_map_[i].length = buffer.m.planes[0].length;
    free_output_buffers_.push_back(i);
  }

  return true;
}

// Releases all input buffers back to the driver (REQBUFS with count 0) and
// clears the bookkeeping. Uses the log-only ioctl macro since this runs on
// teardown paths where signaling a new error is pointless.
void V4L2VideoEncodeAccelerator::DestroyInputBuffers() {
  DVLOG(3) << "DestroyInputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!input_streamon_);

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  reqbufs.memory = input_memory_type_;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  input_buffer_map_.clear();
  free_input_buffers_.clear();
}

// Unmaps every mapped output buffer, releases the buffers back to the driver
// (REQBUFS with count 0), and clears the bookkeeping. Safe to call even if
// CreateOutputBuffers() failed partway: unmapped entries have NULL addresses.
void V4L2VideoEncodeAccelerator::DestroyOutputBuffers() {
  DVLOG(3) << "DestroyOutputBuffers()";
  DCHECK(child_message_loop_proxy_->BelongsToCurrentThread());
  DCHECK(!output_streamon_);

  for (size_t i = 0; i < output_buffer_map_.size(); ++i) {
    if (output_buffer_map_[i].address != NULL)
      device_->Munmap(output_buffer_map_[i].address,
                      output_buffer_map_[i].length);
  }

  struct v4l2_requestbuffers reqbufs;
  memset(&reqbufs, 0, sizeof(reqbufs));
  reqbufs.count = 0;
  reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  reqbufs.memory = V4L2_MEMORY_MMAP;
  IOCTL_OR_LOG_ERROR(VIDIOC_REQBUFS, &reqbufs);

  output_buffer_map_.clear();
  free_output_buffers_.clear();
}

}  // namespace content