1// Copyright 2013 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "content/renderer/media/rtc_video_encoder.h"
6
7#include "base/bind.h"
8#include "base/location.h"
9#include "base/logging.h"
10#include "base/memory/scoped_vector.h"
11#include "base/message_loop/message_loop_proxy.h"
12#include "base/metrics/histogram.h"
13#include "base/rand_util.h"
14#include "base/synchronization/waitable_event.h"
15#include "media/base/bitstream_buffer.h"
16#include "media/base/video_frame.h"
17#include "media/base/video_util.h"
18#include "media/filters/gpu_video_accelerator_factories.h"
19#include "media/filters/h264_parser.h"
20#include "media/video/video_encode_accelerator.h"
21#include "third_party/webrtc/system_wrappers/interface/tick_util.h"
22
23#define NOTIFY_ERROR(x)                             \
24  do {                                              \
25    DLOG(ERROR) << "calling NotifyError(): " << x;  \
26    NotifyError(x);                                 \
27  } while (0)
28
29namespace content {
30
31namespace {
32
33// Populates struct webrtc::RTPFragmentationHeader for H264 codec.
34// Each entry specifies the offset and length (excluding start code) of a NALU.
35// Returns true if successful.
36bool GetRTPFragmentationHeaderH264(webrtc::RTPFragmentationHeader* header,
37                                   const uint8_t* data, uint32_t length) {
38  media::H264Parser parser;
39  parser.SetStream(data, length);
40
41  std::vector<media::H264NALU> nalu_vector;
42  while (true) {
43    media::H264NALU nalu;
44    const media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
45    if (result == media::H264Parser::kOk) {
46      nalu_vector.push_back(nalu);
47    } else if (result == media::H264Parser::kEOStream) {
48      break;
49    } else {
50      DLOG(ERROR) << "Unexpected H264 parser result";
51      return false;
52    }
53  }
54
55  header->VerifyAndAllocateFragmentationHeader(nalu_vector.size());
56  for (size_t i = 0; i < nalu_vector.size(); ++i) {
57    header->fragmentationOffset[i] = nalu_vector[i].data - data;
58    header->fragmentationLength[i] = nalu_vector[i].size;
59    header->fragmentationPlType[i] = 0;
60    header->fragmentationTimeDiff[i] = 0;
61  }
62  return true;
63}
64
65}  // namespace
66
// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding.  It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread.  Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread).  The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
       const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it.  Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function.  When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder.  The destructor is not explicitly called, as
  // Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  virtual void RequireBitstreamBuffers(unsigned int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // The number of input buffers allocated, more
                                 // than what is requested by
                                 // VEA::RequireBitstreamBuffers().
    kOutputBufferCount = 3,      // The number of output (bitstream) buffers
                                 // allocated and handed to the VEA.
  };

  virtual ~Impl();

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notify that an input frame is finished for encoding.  |index| is the index
  // of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  // Enforces that all methods run on the thread this instance was bound to
  // (detached in the constructor, attached on first checked call).
  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value in
  // |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame.  Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode().  As a LIFO since
  // we don't care about ordering.
  std::vector<int> input_buffers_free_;

  // The number of output buffers ready to be filled with output from the
  // encoder.
  int output_buffers_free_count_;

  // 15 bits running index of the VP8 frames. See VP8 RTP spec for details.
  uint16 picture_id_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};
197
RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false),
      output_buffers_free_count_(0) {
  // Construction may happen on a different thread than the one this instance
  // is later bound to; detach so the first checked call re-attaches.
  thread_checker_.DetachFromThread();
  // Picture ID should start on a random number.
  picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
}
213
void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    uint32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  // Register the waiter before any early return, so every error path below
  // unblocks the caller via NotifyError() -> SignalAsyncWaiter().
  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator().Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  // On success the waiter is signaled later, from RequireBitstreamBuffers(),
  // once buffers have been allocated and handed to the VEA.
  if (!video_encoder_->Initialize(media::VideoFrame::I420,
                                  input_visible_size_,
                                  profile,
                                  bitrate * 1000,
                                  this)) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }
}
246
void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  // Encode() is synchronous, so at most one frame can ever be pending here.
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  // If there are no free input and output buffers, drop the frame to avoid a
  // deadlock. If there is a free input buffer, EncodeOneFrame will run and
  // unblock Encode(). If there are no free input buffers but there is a free
  // output buffer, EncodeFrameFinished will be called later to unblock
  // Encode().
  //
  // The caller of Encode() holds a webrtc lock. The deadlock happens when:
  // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame().
  // (2) There are no free input buffers and they cannot be freed because
  //     the encoder has no output buffers.
  // (3) Output buffers cannot be freed because ReturnEncodedImage is queued
  //     on libjingle worker thread to be run. But the worker thread is waiting
  //     for the same webrtc lock held by the caller of Encode().
  //
  // Dropping a frame is fine. The encoder has been filled with all input
  // buffers. Returning an error in Encode() is not fatal and WebRTC will just
  // continue. If this is a key frame, WebRTC will request a key frame again.
  // Besides, webrtc will drop a frame if Encode() blocks too long.
  if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
    DVLOG(2) << "Run out of input and output buffers. Drop the frame.";
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

  // If no input buffer is free right now, the frame stays queued and
  // EncodeFrameFinished() picks it up when a buffer is returned.
  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}
285
286void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
287    int32 bitstream_buffer_id) {
288  DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
289              "bitstream_buffer_id=" << bitstream_buffer_id;
290  DCHECK(thread_checker_.CalledOnValidThread());
291  if (video_encoder_) {
292    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
293        bitstream_buffer_id,
294        output_buffers_[bitstream_buffer_id]->handle(),
295        output_buffers_[bitstream_buffer_id]->mapped_size()));
296    output_buffers_free_count_++;
297  }
298}
299
300void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
301                                                            uint32 framerate) {
302  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
303           << ", framerate=" << framerate;
304  DCHECK(thread_checker_.CalledOnValidThread());
305
306  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
307  if (bitrate > kuint32max / 1000) {
308    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
309    return;
310  }
311
312  if (video_encoder_)
313    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
314}
315
void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
  // Tear down the VEA here, on the correct thread; ~Impl() DCHECKs this ran.
  video_encoder_.reset();
}
321
void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  // NotifyError() may already have torn down the encoder.
  if (!video_encoder_)
    return;

  input_frame_coded_size_ = input_coded_size;

  // Allocate one buffer more than the VEA asked for (kInputBufferExtraCount),
  // so Encode() can usually find a free buffer without waiting.
  for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
            media::VideoFrame::I420, input_coded_size));
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }

  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
    output_buffers_free_count_++;
  }
  // Initialization is complete; unblock the InitEncode() caller with success.
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}
370
void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Validate the id and payload size reported by the (possibly buggy) VEA
  // before touching the shared memory.
  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  // The buffer is in flight to WebRTC until UseOutputBitstreamBufferId().
  output_buffers_free_count_--;

  // Use webrtc timestamps to ensure correct RTP sender behavior.
  // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106.
  const int64 capture_time_us = webrtc::TickTime::MicrosecondTimestamp();

  // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
  int64 capture_time_ms = capture_time_us / 1000;
  uint32_t rtp_timestamp = static_cast<uint32_t>(capture_time_us * 90 / 1000);

  // The EncodedImage wraps the shared memory directly (no copy); the buffer
  // must not be recycled until ReturnEncodedImage() has delivered it.
  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  image->_timeStamp = rtp_timestamp;
  image->capture_time_ms_ = capture_time_ms;
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  // Hand the image off to the parent encoder on its own thread.
  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 base::Passed(&image),
                 bitstream_buffer_id,
                 picture_id_));
  // Picture ID must wrap after reaching the maximum.
  picture_id_ = (picture_id_ + 1) & 0x7FFF;
}
425
426void RTCVideoEncoder::Impl::NotifyError(
427    media::VideoEncodeAccelerator::Error error) {
428  DVLOG(3) << "Impl::NotifyError(): error=" << error;
429  DCHECK(thread_checker_.CalledOnValidThread());
430  int32_t retval;
431  switch (error) {
432    case media::VideoEncodeAccelerator::kInvalidArgumentError:
433      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
434      break;
435    default:
436      retval = WEBRTC_VIDEO_CODEC_ERROR;
437  }
438
439  video_encoder_.reset();
440
441  if (async_waiter_) {
442    SignalAsyncWaiter(retval);
443  } else {
444    encoder_message_loop_proxy_->PostTask(
445        FROM_HERE,
446        base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
447  }
448}
449
// Destroy() must already have run (resetting |video_encoder_|) by the time
// the last reference is dropped.
RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
451
void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
  // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
  // Encode() gets destroyed early.  Handle this by resetting our
  // input_next_frame_* state before we hand off the VideoFrame to the VEA.
  const webrtc::I420VideoFrame* next_frame = input_next_frame_;
  bool next_frame_keyframe = input_next_frame_keyframe_;
  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;

  // The encoder may have been torn down by an earlier error; fail the waiting
  // Encode() call rather than touching a dead VEA.
  if (!video_encoder_) {
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }

  // Take a free input buffer (LIFO; ordering doesn't matter) and wrap its
  // shared memory as a media::VideoFrame for the VEA.
  const int index = input_buffers_free_.back();
  base::SharedMemory* input_buffer = input_buffers_[index];
  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::WrapExternalPackedMemory(
          media::VideoFrame::I420,
          input_frame_coded_size_,
          gfx::Rect(input_visible_size_),
          input_visible_size_,
          reinterpret_cast<uint8*>(input_buffer->memory()),
          input_buffer->mapped_size(),
          input_buffer->handle(),
          base::TimeDelta(),
          // Returns the buffer to |input_buffers_free_| when the VEA is done.
          base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
  if (!frame.get()) {
    DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  // Do a strided copy of the input frame to match the input requirements for
  // the encoder.
  // TODO(sheu): support zero-copy from WebRTC.  http://crbug.com/269312
  media::CopyYPlane(next_frame->buffer(webrtc::kYPlane),
                    next_frame->stride(webrtc::kYPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyUPlane(next_frame->buffer(webrtc::kUPlane),
                    next_frame->stride(webrtc::kUPlane),
                    next_frame->height(),
                    frame.get());
  media::CopyVPlane(next_frame->buffer(webrtc::kVPlane),
                    next_frame->stride(webrtc::kVPlane),
                    next_frame->height(),
                    frame.get());

  video_encoder_->Encode(frame, next_frame_keyframe);
  input_buffers_free_.pop_back();
  // The frame has been copied into the shared-memory buffer, so the caller's
  // Encode() can return now; the actual encode completes asynchronously.
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}
511
void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  // Return the completed buffer to the free list.
  input_buffers_free_.push_back(index);
  // A frame may have been queued by Enqueue() while all buffers were in use;
  // encode it now that one is available.
  if (input_next_frame_)
    EncodeOneFrame();
}
521
void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Only one synchronous call (InitEncode()/Encode()) may be in flight.
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}
530
void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Write the return value before signaling: the waiting thread reads it as
  // soon as the event fires.
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}
538
539#undef NOTIFY_ERROR
540
541////////////////////////////////////////////////////////////////////////////////
542//
543// RTCVideoEncoder
544//
545////////////////////////////////////////////////////////////////////////////////
546
RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
      weak_factory_(this) {
  // |impl_| is created lazily in InitEncode().
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}
559
RTCVideoEncoder::~RTCVideoEncoder() {
  DVLOG(3) << "~RTCVideoEncoder";
  DCHECK(thread_checker_.CalledOnValidThread());
  // Release() posts Impl::Destroy() to the media thread and drops |impl_|.
  Release();
  DCHECK(!impl_.get());
}
566
int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_.get());

  // Invalidate any weak pointers handed to a previous Impl before creating a
  // new one bound to a fresh weak pointer.
  weak_factory_.InvalidateWeakPtrs();
  impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  // Initialization runs on the media thread; |initialization_waiter| and
  // |initialization_retval| live on this stack until it signals.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  RecordInitEncodeUMA(initialization_retval);
  return initialization_retval;
}
596
int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  // If Impl is gone (never initialized, or torn down by an error), report the
  // last recorded status.
  if (!impl_.get()) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  // WebRTC requests a keyframe via the first entry of |frame_types|.
  bool want_key_frame = frame_types && frame_types->size() &&
                        frame_types->front() == webrtc::kKeyFrame;
  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  // The frame and the waiter are passed by pointer; they stay valid because
  // this function blocks until the Impl signals.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 want_key_frame,
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}
625
int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_.get()) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }

  // Stored for use by ReturnEncodedImage(); not owned.
  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
638
int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  if (impl_.get()) {
    // Destroy must run on the media thread; the refcount on |impl_| held by
    // the posted task keeps it alive until then.
    gpu_factories_->GetTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    // Cancel any pending callbacks from the old Impl back into this object.
    weak_factory_.InvalidateWeakPtrs();
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
652
int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  // Ignored.  Channel statistics are not forwarded to the hardware encoder.
  return WEBRTC_VIDEO_CODEC_OK;
}
659
int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  if (!impl_.get()) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  // Fire-and-forget: the change is applied asynchronously on the media thread.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}
676
void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id,
                                         uint16 picture_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
           << "bitstream_buffer_id=" << bitstream_buffer_id
           << ", picture_id=" << picture_id;

  // No registered callback means no one to deliver to; the output buffer is
  // not recycled in this case.
  if (!encoded_image_callback_)
    return;

  // Build the RTP fragmentation header appropriate for the codec in use.
  webrtc::RTPFragmentationHeader header;
  memset(&header, 0, sizeof(header));
  switch (video_codec_type_) {
    case webrtc::kVideoCodecVP8:
    case webrtc::kVideoCodecGeneric:
      // Generate a header describing a single fragment.
      // Note that webrtc treats the generic-type payload as an opaque buffer.
      header.VerifyAndAllocateFragmentationHeader(1);
      header.fragmentationOffset[0] = 0;
      header.fragmentationLength[0] = image->_length;
      header.fragmentationPlType[0] = 0;
      header.fragmentationTimeDiff[0] = 0;
      break;
    case webrtc::kVideoCodecH264:
      // H264 requires one fragment per NALU, parsed out of the bitstream.
      if (!GetRTPFragmentationHeaderH264(
          &header, image->_buffer, image->_length)) {
        DLOG(ERROR) << "Failed to get RTP fragmentation header for H264";
        NotifyError(WEBRTC_VIDEO_CODEC_ERROR);
        return;
      }
      break;
    default:
      NOTREACHED() << "Invalid video codec type";
      return;
  }

  webrtc::CodecSpecificInfo info;
  memset(&info, 0, sizeof(info));
  info.codecType = video_codec_type_;
  if (video_codec_type_ == webrtc::kVideoCodecVP8) {
    info.codecSpecific.VP8.pictureId = picture_id;
    // -1 indicates temporal-layer fields are unused.
    info.codecSpecific.VP8.tl0PicIdx = -1;
    info.codecSpecific.VP8.keyIdx = -1;
  }

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}
737
void RTCVideoEncoder::NotifyError(int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "NotifyError(): error=" << error;

  // Remember the error so later Encode()/SetRates() calls can report it,
  // then tear down the Impl on the media thread.
  impl_status_ = error;
  gpu_factories_->GetTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}
747
// Records whether hardware-encoder initialization succeeded and, on success,
// which codec profile is in use, to UMA histograms.
void RTCVideoEncoder::RecordInitEncodeUMA(int32_t init_retval) {
  UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                        init_retval == WEBRTC_VIDEO_CODEC_OK);
  if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
    UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                              video_codec_profile_,
                              media::VIDEO_CODEC_PROFILE_MAX + 1);
  }
}
757
758}  // namespace content
759