/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 *
 * This file contains the WEBRTC VP8 wrapper implementation
 *
 */

#include "webrtc/modules/video_coding/codecs/vp8/vp8_impl.h"

#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <vector>

#include "vpx/vpx_encoder.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vp8cx.h"
#include "vpx/vp8dx.h"

#include "webrtc/common.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"
#include "webrtc/modules/video_coding/codecs/vp8/reference_picture_selection.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace_event.h"

enum { kVp8ErrorPropagationTh = 30 };

namespace webrtc {

VP8EncoderImpl::VP8EncoderImpl()
    : encoded_image_(),
      encoded_complete_callback_(NULL),
      inited_(false),
      timestamp_(0),
      picture_id_(0),
      feedback_mode_(false),
      cpu_speed_(-6),  // default value
      rc_max_intra_target_(0),
      token_partitions_(VP8_ONE_TOKENPARTITION),
      rps_(new ReferencePictureSelection),
      temporal_layers_(NULL),
      encoder_(NULL),
      config_(NULL),
      raw_(NULL) {
  memset(&codec_, 0, sizeof(codec_));
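  // Seed rand() once per encoder instance; InitEncode() uses rand() both for
  // the initial picture ID and when creating the temporal layers object.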
  uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
  srand(seed);
}

VP8EncoderImpl::~VP8EncoderImpl() {
  Release();
  delete rps_;
}

int VP8EncoderImpl::Release() {
  if (encoded_image_._buffer != NULL) {
    delete [] encoded_image_._buffer;
    encoded_image_._buffer = NULL;
  }
  if (encoder_ != NULL) {
    if (vpx_codec_destroy(encoder_)) {
      return WEBRTC_VIDEO_CODEC_MEMORY;
    }
    delete encoder_;
    encoder_ = NULL;
  }
  if (config_ != NULL) {
    delete config_;
    config_ = NULL;
  }
  if (raw_ != NULL) {
    vpx_img_free(raw_);
    raw_ = NULL;
  }
  delete temporal_layers_;
  temporal_layers_ = NULL;
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
                             uint32_t new_framerate) {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (encoder_->err) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  if (new_framerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Update the bit rate, capped by the configured maximum.
  if (codec_.maxBitrate > 0 && new_bitrate_kbit > codec_.maxBitrate) {
    new_bitrate_kbit = codec_.maxBitrate;
  }
  config_->rc_target_bitrate = new_bitrate_kbit;  // in kbit/s
  temporal_layers_->ConfigureBitrates(new_bitrate_kbit, codec_.maxBitrate,
                                      new_framerate, config_);
  codec_.maxFramerate = new_framerate;
  quality_scaler_.ReportFramerate(new_framerate);

  // Update the encoder context with the new configuration.
  if (vpx_codec_enc_config_set(encoder_, config_)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8EncoderImpl::InitEncode(const VideoCodec* inst,
                               int number_of_cores,
                               uint32_t /*max_payload_size*/) {
  if (inst == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->maxFramerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // allow zero to represent an unspecified maxBitRate
  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->width < 1 || inst->height < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (number_of_cores < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;

  int retVal = Release();
  if (retVal < 0) {
    return retVal;
  }
  if (encoder_ == NULL) {
    encoder_ = new vpx_codec_ctx_t;
  }
  if (config_ == NULL) {
    config_ = new vpx_codec_enc_cfg_t;
  }
  timestamp_ = 0;

  if (&codec_ != inst) {
    codec_ = *inst;
  }

  // TODO(andresp): assert(inst->extra_options) and cleanup.
  Config default_options;
  const Config& options =
      inst->extra_options ? *inst->extra_options : default_options;

  int num_temporal_layers = inst->codecSpecific.VP8.numberOfTemporalLayers > 1 ?
      inst->codecSpecific.VP8.numberOfTemporalLayers : 1;
  assert(temporal_layers_ == NULL);
  temporal_layers_ = options.Get<TemporalLayers::Factory>()
                         .Create(num_temporal_layers, rand());
  // Random start; the picture ID is kept to 15 bits (masked with 0x7FFF).
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;

  // Allocate memory for the encoded image.
  if (encoded_image_._buffer != NULL) {
    delete [] encoded_image_._buffer;
  }
  encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
  encoded_image_._buffer = new uint8_t[encoded_image_._size];
  encoded_image_._completeFrame = true;

  // Create a wrapper around the input image with the data pointers set to
  // NULL; the actual pointers are set per frame in Encode(). Align is set to
  // 1, as it is meaningless here (no memory is actually allocated).
  raw_ = vpx_img_wrap(NULL, VPX_IMG_FMT_I420, codec_.width, codec_.height,
                      1, NULL);
  // Populate the encoder configuration with default values.
  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), config_, 0)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  config_->g_w = codec_.width;
  config_->g_h = codec_.height;
  config_->rc_target_bitrate = inst->startBitrate;  // in kbit/s
  temporal_layers_->ConfigureBitrates(inst->startBitrate, inst->maxBitrate,
                                      inst->maxFramerate, config_);
  // Set the codec time base.
  config_->g_timebase.num = 1;
  config_->g_timebase.den = 90000;
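  // The 90 kHz timebase matches the RTP video clock, so frame timestamps and
  // durations can be expressed directly in RTP ticks (see the duration
  // computation in Encode()).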

  // Set the error resilience mode according to user settings.
  switch (inst->codecSpecific.VP8.resilience) {
    case kResilienceOff:
      config_->g_error_resilient = 0;
      if (num_temporal_layers > 1) {
        // Must be on for temporal layers (i.e., |num_temporal_layers| > 1).
        config_->g_error_resilient = 1;
      }
      break;
    case kResilientStream:
      // TODO(holmer): Replace with VPX_ERROR_RESILIENT_DEFAULT when we
      // drop support for libvpx 9.6.0.
      config_->g_error_resilient = 1;
      break;
    case kResilientFrames:
#ifdef INDEPENDENT_PARTITIONS
      config_->g_error_resilient =
          VPX_ERROR_RESILIENT_DEFAULT | VPX_ERROR_RESILIENT_PARTITIONS;
      break;
#else
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;  // Not supported
#endif
  }
  config_->g_lag_in_frames = 0;  // 0 - no frame lagging

  if (codec_.width * codec_.height >= 1920 * 1080 && number_of_cores > 8) {
    config_->g_threads = 8;  // 8 threads for 1080p on high perf machines.
  } else if (codec_.width * codec_.height > 1280 * 960 &&
      number_of_cores >= 6) {
    config_->g_threads = 3;  // 3 threads for 1080p on mid-range machines.
  } else if (codec_.width * codec_.height > 640 * 480 && number_of_cores >= 3) {
    config_->g_threads = 2;  // 2 threads for qHD/HD.
  } else {
    config_->g_threads = 1;  // 1 thread for VGA or less.
  }

  // Rate control settings.
  config_->rc_dropframe_thresh = inst->codecSpecific.VP8.frameDroppingOn ?
      30 : 0;
  config_->rc_end_usage = VPX_CBR;
  config_->g_pass = VPX_RC_ONE_PASS;
  // Handle resizing outside of libvpx.
  config_->rc_resize_allowed = 0;
  config_->rc_min_quantizer = 2;
  config_->rc_max_quantizer = inst->qpMax;
  config_->rc_undershoot_pct = 100;
  config_->rc_overshoot_pct = 15;
  config_->rc_buf_initial_sz = 500;
  config_->rc_buf_optimal_sz = 600;
  config_->rc_buf_sz = 1000;
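  // Note: the rc_buf_* sizes above are expressed in milliseconds of data at
  // the target bitrate (libvpx rate control convention), not in bytes.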
  // Set the maximum target size of any key frame.
  rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz);

  if (feedback_mode_) {
    // Disable periodic key frames if we get feedback from the decoder
    // through SLI and RPSI.
    config_->kf_mode = VPX_KF_DISABLED;
  } else if (inst->codecSpecific.VP8.keyFrameInterval > 0) {
    config_->kf_mode = VPX_KF_AUTO;
    config_->kf_max_dist = inst->codecSpecific.VP8.keyFrameInterval;
  } else {
    config_->kf_mode = VPX_KF_DISABLED;
  }
  switch (inst->codecSpecific.VP8.complexity) {
    case kComplexityHigh:
      cpu_speed_ = -5;
      break;
    case kComplexityHigher:
      cpu_speed_ = -4;
      break;
    case kComplexityMax:
      cpu_speed_ = -3;
      break;
    default:
      cpu_speed_ = -6;
      break;
  }
#if defined(WEBRTC_ARCH_ARM)
  // On mobile platforms, always use -12 to balance CPU usage against video
  // quality.
  cpu_speed_ = -12;
#endif
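  // cpu_speed_ is applied through VP8E_SET_CPUUSED in
  // InitAndSetControlSettings(). In the mapping above, higher requested
  // complexity uses values closer to zero (e.g. -3 for kComplexityMax),
  // while the ARM build picks -12 to favor encoding speed.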
  rps_->Init();
  quality_scaler_.Init(codec_.qpMax);
  quality_scaler_.ReportFramerate(codec_.maxFramerate);
  return InitAndSetControlSettings(inst);
}

int VP8EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
  vpx_codec_flags_t flags = 0;
  // TODO(holmer): We should make a smarter decision on the number of
  // partitions. Eight is probably not the optimal number for low resolution
  // video.
  flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
  if (vpx_codec_enc_init(encoder_, vpx_codec_vp8_cx(), config_, flags)) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  vpx_codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 1);
  vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
  vpx_codec_control(encoder_, VP8E_SET_TOKEN_PARTITIONS,
                    static_cast<vp8e_token_partitions>(token_partitions_));
#if !defined(WEBRTC_ARCH_ARM)
  // TODO(fbarchard): Enable Noise reduction for ARM once optimized.
  vpx_codec_control(encoder_, VP8E_SET_NOISE_SENSITIVITY,
                    inst->codecSpecific.VP8.denoisingOn ? 1 : 0);
#endif
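  // Cap key frame size: rc_max_intra_target_ is the largest allowed key
  // frame, expressed as a percentage of the average per-frame bandwidth
  // (see MaxIntraTarget()).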
  vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
                    rc_max_intra_target_);
  inited_ = true;

  return WEBRTC_VIDEO_CODEC_OK;
}

uint32_t VP8EncoderImpl::MaxIntraTarget(uint32_t optimalBuffersize) {
  // Set the max key frame target to the optimal buffer level (normalized by
  // the target bitrate) and scaled by scalePar:
  //   max target size = scalePar * optimalBufferSize * targetBR[Kbps].
  // The value is returned as a percentage of perFrameBw, where
  //   perFrameBw = targetBR[Kbps] * 1000 / frameRate.
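  // The division by 10 below combines the ms-to-seconds conversion (1/1000)
  // with the conversion to percent (x100). Example: with
  // rc_buf_optimal_sz = 600 ms and a 30 fps maximum frame rate,
  // 0.5 * 600 * 30 / 10 = 900, i.e. a key frame may be up to 9x the average
  // per-frame size.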

  float scalePar = 0.5;
  uint32_t targetPct = optimalBuffersize * scalePar * codec_.maxFramerate / 10;

  // Don't go below 3 times the per frame bandwidth.
  const uint32_t minIntraTh = 300;
  return (targetPct < minIntraTh) ? minIntraTh : targetPct;
}

int VP8EncoderImpl::Encode(const I420VideoFrame& input_frame,
                           const CodecSpecificInfo* codec_specific_info,
                           const std::vector<VideoFrameType>* frame_types) {
  TRACE_EVENT1("webrtc", "VP8::Encode", "timestamp", input_frame.timestamp());

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (input_frame.IsZeroSize()) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (encoded_complete_callback_ == NULL) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  VideoFrameType frame_type = kDeltaFrame;
  // We only support one stream at the moment.
  if (frame_types && frame_types->size() > 0) {
    frame_type = (*frame_types)[0];
  }

  const I420VideoFrame& frame =
      config_->rc_dropframe_thresh > 0 &&
              codec_.codecSpecific.VP8.automaticResizeOn
          ? quality_scaler_.GetScaledFrame(input_frame)
          : input_frame;
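  // If the quality scaler handed back a downscaled frame, its dimensions
  // differ from codec_; the size check below picks that up and reconfigures
  // the encoder via UpdateCodecFrameSize().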

  // Check for change in frame size.
  if (frame.width() != codec_.width ||
      frame.height() != codec_.height) {
    int ret = UpdateCodecFrameSize(frame);
    if (ret < 0) {
      return ret;
    }
  }
  // Image in vpx_image_t format.
  // Input frame is const. VP8's raw frame is not defined as const.
  raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(frame.buffer(kYPlane));
  raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(frame.buffer(kUPlane));
  raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(frame.buffer(kVPlane));
  // TODO(mikhal): Stride should be set in initialization.
  raw_->stride[VPX_PLANE_Y] = frame.stride(kYPlane);
  raw_->stride[VPX_PLANE_U] = frame.stride(kUPlane);
  raw_->stride[VPX_PLANE_V] = frame.stride(kVPlane);

  int flags = temporal_layers_->EncodeFlags(frame.timestamp());

  bool send_keyframe = (frame_type == kKeyFrame);
  if (send_keyframe) {
    // Key frame request from caller.
    // Will update both golden and alt-ref.
    flags = VPX_EFLAG_FORCE_KF;
  } else if (feedback_mode_ && codec_specific_info) {
    // Handle RPSI and SLI messages and set up the appropriate encode flags.
    bool sendRefresh = false;
    if (codec_specific_info->codecType == kVideoCodecVP8) {
      if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) {
        rps_->ReceivedRPSI(
            codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
      }
      if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
        sendRefresh = rps_->ReceivedSLI(frame.timestamp());
      }
    }
    flags = rps_->EncodeFlags(picture_id_, sendRefresh,
                              frame.timestamp());
  }

  // TODO(holmer): Ideally the duration should be the timestamp diff of this
  // frame and the next frame to be encoded, which we don't have. Instead we
  // would like to use the duration of the previous frame. Unfortunately the
  // rate control seems to be off with that setup. Using the average input
  // frame rate to calculate an average duration for now.
  assert(codec_.maxFramerate > 0);
  uint32_t duration = 90000 / codec_.maxFramerate;
  if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
                       VPX_DL_REALTIME)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  timestamp_ += duration;

  return GetEncodedPartitions(frame);
}

int VP8EncoderImpl::UpdateCodecFrameSize(const I420VideoFrame& input_image) {
  codec_.width = input_image.width();
  codec_.height = input_image.height();
  raw_->w = codec_.width;
  raw_->h = codec_.height;
  raw_->d_w = codec_.width;
  raw_->d_h = codec_.height;

  raw_->stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
  raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane);
  raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane);
  vpx_img_set_rect(raw_, 0, 0, codec_.width, codec_.height);

  // Update encoder context for new frame size.
  // Change of frame size will automatically trigger a key frame.
  config_->g_w = codec_.width;
  config_->g_h = codec_.height;
  if (vpx_codec_enc_config_set(encoder_, config_)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

void VP8EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
                                           const vpx_codec_cx_pkt& pkt,
                                           uint32_t timestamp) {
  assert(codec_specific != NULL);
  codec_specific->codecType = kVideoCodecVP8;
  CodecSpecificInfoVP8* vp8Info = &(codec_specific->codecSpecific.VP8);
  vp8Info->pictureId = picture_id_;
  vp8Info->simulcastIdx = 0;
  vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
  vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
  temporal_layers_->PopulateCodecSpecific(
      (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? true : false, vp8Info,
      timestamp);
  picture_id_ = (picture_id_ + 1) & 0x7FFF;  // prepare next
}

int VP8EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) {
  vpx_codec_iter_t iter = NULL;
  int part_idx = 0;
  encoded_image_._length = 0;
  encoded_image_._frameType = kDeltaFrame;
  RTPFragmentationHeader frag_info;
  frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_) + 1);
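  // With VPX_CODEC_USE_OUTPUT_PARTITION enabled, libvpx returns each VP8
  // partition as a separate packet: the first (mode/motion vector) partition
  // plus 2^token_partitions_ token partitions, hence the fragment count above.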
  CodecSpecificInfo codec_specific;

  const vpx_codec_cx_pkt_t* pkt = NULL;
  while ((pkt = vpx_codec_get_cx_data(encoder_, &iter)) != NULL) {
    switch (pkt->kind) {
      case VPX_CODEC_CX_FRAME_PKT: {
        memcpy(&encoded_image_._buffer[encoded_image_._length],
               pkt->data.frame.buf,
               pkt->data.frame.sz);
        frag_info.fragmentationOffset[part_idx] = encoded_image_._length;
        frag_info.fragmentationLength[part_idx] = pkt->data.frame.sz;
        frag_info.fragmentationPlType[part_idx] = 0;  // not known here
        frag_info.fragmentationTimeDiff[part_idx] = 0;
        encoded_image_._length += pkt->data.frame.sz;
        assert(encoded_image_._length <= encoded_image_._size);
        ++part_idx;
        break;
      }
      default: {
        break;
      }
    }
    // End of frame
    if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
      // check if encoded frame is a key frame
      if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
        encoded_image_._frameType = kKeyFrame;
        rps_->EncodedKeyFrame(picture_id_);
      }
      PopulateCodecSpecific(&codec_specific, *pkt, input_image.timestamp());
      break;
    }
  }
  if (encoded_image_._length > 0) {
    TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
    encoded_image_._timeStamp = input_image.timestamp();
    encoded_image_.capture_time_ms_ = input_image.render_time_ms();
    encoded_image_._encodedHeight = codec_.height;
    encoded_image_._encodedWidth = codec_.width;
    encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
                                        &frag_info);
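    // Query the quantizer libvpx used for this frame
    // (VP8E_GET_LAST_QUANTIZER_64 reports it on the same 0-63 scale as
    // rc_min/max_quantizer) and report it to the quality scaler, which uses
    // it when deciding whether to rescale.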
    int qp;
    vpx_codec_control(encoder_, VP8E_GET_LAST_QUANTIZER_64, &qp);
    quality_scaler_.ReportEncodedFrame(qp);
  } else {
    quality_scaler_.ReportDroppedFrame();
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8EncoderImpl::SetChannelParameters(uint32_t /*packet_loss*/, int rtt) {
  rps_->SetRtt(rtt);
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8EncoderImpl::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

VP8DecoderImpl::VP8DecoderImpl()
    : decode_complete_callback_(NULL),
      inited_(false),
      feedback_mode_(false),
      decoder_(NULL),
      last_keyframe_(),
      image_format_(VPX_IMG_FMT_NONE),
      ref_frame_(NULL),
      propagation_cnt_(-1),
      mfqe_enabled_(false),
      key_frame_required_(true) {
  memset(&codec_, 0, sizeof(codec_));
}

VP8DecoderImpl::~VP8DecoderImpl() {
  inited_ = true;  // in order to do the actual release
  Release();
}

int VP8DecoderImpl::Reset() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  InitDecode(&codec_, 1);
  propagation_cnt_ = -1;
  mfqe_enabled_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
  if (inst == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  int ret_val = Release();
  if (ret_val < 0) {
    return ret_val;
  }
  if (decoder_ == NULL) {
    decoder_ = new vpx_dec_ctx_t;
  }
  if (inst->codecType == kVideoCodecVP8) {
    feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
  }
  vpx_codec_dec_cfg_t cfg;
  // Setting number of threads to a constant value (1)
  cfg.threads = 1;
  cfg.h = cfg.w = 0;  // set after decode

  vpx_codec_flags_t flags = 0;
#ifndef WEBRTC_ARCH_ARM
  flags = VPX_CODEC_USE_POSTPROC;
  if (inst->codecSpecific.VP8.errorConcealmentOn) {
    flags |= VPX_CODEC_USE_ERROR_CONCEALMENT;
  }
#ifdef INDEPENDENT_PARTITIONS
  flags |= VPX_CODEC_USE_INPUT_PARTITION;
#endif
#endif

  if (vpx_codec_dec_init(decoder_, vpx_codec_vp8_dx(), &cfg, flags)) {
    return WEBRTC_VIDEO_CODEC_MEMORY;
  }

#ifndef WEBRTC_ARCH_ARM
  vp8_postproc_cfg_t ppcfg;
  ppcfg.post_proc_flag = VP8_DEMACROBLOCK | VP8_DEBLOCK;
  // Strength of deblocking filter. Valid range: [0, 16].
  ppcfg.deblocking_level = 3;
  vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
#endif

  if (&codec_ != inst) {
    // Save VideoCodec instance for later; mainly for duplicating the decoder.
    codec_ = *inst;
  }

  propagation_cnt_ = -1;

  inited_ = true;

  // Always start with a complete key frame.
  key_frame_required_ = true;

  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8DecoderImpl::Decode(const EncodedImage& input_image,
                           bool missing_frames,
                           const RTPFragmentationHeader* fragmentation,
                           const CodecSpecificInfo* codec_specific_info,
                           int64_t /*render_time_ms*/) {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (decode_complete_callback_ == NULL) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (input_image._buffer == NULL && input_image._length > 0) {
    // Reset to avoid requesting key frames too often.
    if (propagation_cnt_ > 0)
      propagation_cnt_ = 0;
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

#ifdef INDEPENDENT_PARTITIONS
  if (fragmentation == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
#endif

#ifndef WEBRTC_ARCH_ARM
  if (!mfqe_enabled_ && codec_specific_info &&
      codec_specific_info->codecSpecific.VP8.temporalIdx > 0) {
    // Enable MFQE if we are receiving layers.
    // temporalIdx is set in the jitter buffer according to what the RTP
    // header says.
    mfqe_enabled_ = true;
    vp8_postproc_cfg_t ppcfg;
    ppcfg.post_proc_flag = VP8_MFQE | VP8_DEMACROBLOCK | VP8_DEBLOCK;
    ppcfg.deblocking_level = 3;
    vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
  }
#endif

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (input_image._frameType != kKeyFrame)
      return WEBRTC_VIDEO_CODEC_ERROR;
    // We have a key frame - is it complete?
    if (input_image._completeFrame) {
      key_frame_required_ = false;
    } else {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }
  // Restrict error propagation using key frame requests. Disabled when
  // the feedback mode is enabled (RPS).
  // Reset on a key frame refresh.
  if (!feedback_mode_) {
    if (input_image._frameType == kKeyFrame && input_image._completeFrame)
      propagation_cnt_ = -1;
    // Start count on first loss.
    else if ((!input_image._completeFrame || missing_frames) &&
        propagation_cnt_ == -1)
      propagation_cnt_ = 0;
    if (propagation_cnt_ >= 0)
      propagation_cnt_++;
  }
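  // The counter is compared against kVp8ErrorPropagationTh at the end of
  // Decode(); once it exceeds the threshold an error is returned so that the
  // caller requests a fresh key frame.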

  vpx_codec_iter_t iter = NULL;
  vpx_image_t* img;
  int ret;

  // Check for missing frames.
  if (missing_frames) {
    // Call decoder with zero data length to signal missing frames.
    if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME)) {
      // Reset to avoid requesting key frames too often.
      if (propagation_cnt_ > 0)
        propagation_cnt_ = 0;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    // We don't render this frame.
    vpx_codec_get_frame(decoder_, &iter);
    iter = NULL;
  }

#ifdef INDEPENDENT_PARTITIONS
  if (DecodePartitions(input_image, fragmentation)) {
    // Reset to avoid requesting key frames too often.
    if (propagation_cnt_ > 0) {
      propagation_cnt_ = 0;
    }
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
#else
  uint8_t* buffer = input_image._buffer;
  if (input_image._length == 0) {
    buffer = NULL;  // Triggers full frame concealment.
  }
  if (vpx_codec_decode(decoder_,
                       buffer,
                       input_image._length,
                       0,
                       VPX_DL_REALTIME)) {
    // Reset to avoid requesting key frames too often.
    if (propagation_cnt_ > 0)
      propagation_cnt_ = 0;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
#endif

  // Store encoded frame if key frame. (Used in Copy method.)
  if (input_image._frameType == kKeyFrame && input_image._buffer != NULL) {
    const uint32_t bytes_to_copy = input_image._length;
    if (last_keyframe_._size < bytes_to_copy) {
      delete [] last_keyframe_._buffer;
      last_keyframe_._buffer = NULL;
      last_keyframe_._size = 0;
    }

    uint8_t* temp_buffer = last_keyframe_._buffer;  // Save buffer ptr.
    uint32_t temp_size = last_keyframe_._size;  // Save size.
    last_keyframe_ = input_image;  // Shallow copy.
    last_keyframe_._buffer = temp_buffer;  // Restore buffer ptr.
    last_keyframe_._size = temp_size;  // Restore buffer size.
    if (!last_keyframe_._buffer) {
      // Allocate memory.
      last_keyframe_._size = bytes_to_copy;
      last_keyframe_._buffer = new uint8_t[last_keyframe_._size];
    }
    // Copy encoded frame.
    memcpy(last_keyframe_._buffer, input_image._buffer, bytes_to_copy);
    last_keyframe_._length = bytes_to_copy;
  }

  img = vpx_codec_get_frame(decoder_, &iter);
  ret = ReturnFrame(img, input_image._timeStamp, input_image.ntp_time_ms_);
  if (ret != 0) {
    // Reset to avoid requesting key frames too often.
    if (ret < 0 && propagation_cnt_ > 0)
      propagation_cnt_ = 0;
    return ret;
  }
  if (feedback_mode_) {
    // Whenever we receive an incomplete key frame all reference buffers will
    // be corrupt. If that happens we must request new key frames until we
    // decode a complete key frame.
    if (input_image._frameType == kKeyFrame && !input_image._completeFrame)
      return WEBRTC_VIDEO_CODEC_ERROR;

    // Check for reference updates and last reference buffer corruption and
    // signal successful reference propagation or frame corruption to the
    // encoder.
    int reference_updates = 0;
    if (vpx_codec_control(decoder_, VP8D_GET_LAST_REF_UPDATES,
                          &reference_updates)) {
      // Reset to avoid requesting key frames too often.
      if (propagation_cnt_ > 0)
        propagation_cnt_ = 0;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    int corrupted = 0;
    if (vpx_codec_control(decoder_, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
      // Reset to avoid requesting key frames too often.
      if (propagation_cnt_ > 0)
        propagation_cnt_ = 0;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    int16_t picture_id = -1;
    if (codec_specific_info) {
      picture_id = codec_specific_info->codecSpecific.VP8.pictureId;
    }
    if (picture_id > -1) {
      if (((reference_updates & VP8_GOLD_FRAME) ||
          (reference_updates & VP8_ALTR_FRAME)) && !corrupted) {
        decode_complete_callback_->ReceivedDecodedReferenceFrame(picture_id);
      }
      decode_complete_callback_->ReceivedDecodedFrame(picture_id);
    }
    if (corrupted) {
      // We can decode, but the frame may contain artifacts.
      return WEBRTC_VIDEO_CODEC_REQUEST_SLI;
    }
  }
  // Check against the error propagation threshold.
  if (propagation_cnt_ > kVp8ErrorPropagationTh) {
    // Reset to avoid requesting key frames too often.
    propagation_cnt_ = 0;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8DecoderImpl::DecodePartitions(
    const EncodedImage& input_image,
    const RTPFragmentationHeader* fragmentation) {
  for (int i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
    const uint8_t* partition = input_image._buffer +
        fragmentation->fragmentationOffset[i];
    const uint32_t partition_length =
        fragmentation->fragmentationLength[i];
    if (vpx_codec_decode(decoder_,
                         partition,
                         partition_length,
                         0,
                         VPX_DL_REALTIME)) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }
  // Signal end of frame data. If there was no frame data this will trigger
  // a full frame concealment.
  if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME))
    return WEBRTC_VIDEO_CODEC_ERROR;
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8DecoderImpl::ReturnFrame(const vpx_image_t* img,
                                uint32_t timestamp,
                                int64_t ntp_time_ms) {
  if (img == NULL) {
    // Decoder OK and NULL image => No show frame
    return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
  }
  int half_height = (img->d_h + 1) / 2;
  int size_y = img->stride[VPX_PLANE_Y] * img->d_h;
  int size_u = img->stride[VPX_PLANE_U] * half_height;
  int size_v = img->stride[VPX_PLANE_V] * half_height;
  // TODO(mikhal): This does a copy - need to SwapBuffers.
  decoded_image_.CreateFrame(size_y, img->planes[VPX_PLANE_Y],
                             size_u, img->planes[VPX_PLANE_U],
                             size_v, img->planes[VPX_PLANE_V],
                             img->d_w, img->d_h,
                             img->stride[VPX_PLANE_Y],
                             img->stride[VPX_PLANE_U],
                             img->stride[VPX_PLANE_V]);
  decoded_image_.set_timestamp(timestamp);
  decoded_image_.set_ntp_time_ms(ntp_time_ms);
  int ret = decode_complete_callback_->Decoded(decoded_image_);
  if (ret != 0)
    return ret;

  // Remember image format for later
  image_format_ = img->fmt;
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8DecoderImpl::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  decode_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int VP8DecoderImpl::Release() {
  if (last_keyframe_._buffer != NULL) {
    delete [] last_keyframe_._buffer;
    last_keyframe_._buffer = NULL;
  }
  if (decoder_ != NULL) {
    if (vpx_codec_destroy(decoder_)) {
      return WEBRTC_VIDEO_CODEC_MEMORY;
    }
    delete decoder_;
    decoder_ = NULL;
  }
  if (ref_frame_ != NULL) {
    vpx_img_free(&ref_frame_->img);
    delete ref_frame_;
    ref_frame_ = NULL;
  }
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}

VideoDecoder* VP8DecoderImpl::Copy() {
  // Sanity checks.
  if (!inited_) {
    // Not initialized.
    assert(false);
    return NULL;
  }
  if (decoded_image_.IsZeroSize()) {
    // Nothing has been decoded before; cannot clone.
    return NULL;
  }
  if (last_keyframe_._buffer == NULL) {
    // Cannot clone if we have no key frame to start with.
    return NULL;
  }
  // Create a new VideoDecoder object
  VP8DecoderImpl* copy = new VP8DecoderImpl;

  // Initialize the new decoder
  if (copy->InitDecode(&codec_, 1) != WEBRTC_VIDEO_CODEC_OK) {
    delete copy;
    return NULL;
  }
  // Inject last key frame into new decoder.
  if (vpx_codec_decode(copy->decoder_, last_keyframe_._buffer,
                       last_keyframe_._length, NULL, VPX_DL_REALTIME)) {
    delete copy;
    return NULL;
  }
  // Allocate memory for reference image copy
  assert(decoded_image_.width() > 0);
  assert(decoded_image_.height() > 0);
  assert(image_format_ > VPX_IMG_FMT_NONE);
  // Check if frame format has changed.
  if (ref_frame_ &&
      (decoded_image_.width() != static_cast<int>(ref_frame_->img.d_w) ||
          decoded_image_.height() != static_cast<int>(ref_frame_->img.d_h) ||
          image_format_ != ref_frame_->img.fmt)) {
    vpx_img_free(&ref_frame_->img);
    delete ref_frame_;
    ref_frame_ = NULL;
  }

  if (!ref_frame_) {
    ref_frame_ = new vpx_ref_frame_t;

    unsigned int align = 16;
    if (!vpx_img_alloc(&ref_frame_->img,
                       static_cast<vpx_img_fmt_t>(image_format_),
                       decoded_image_.width(), decoded_image_.height(),
                       align)) {
      assert(false);
      delete copy;
      return NULL;
    }
  }
  const vpx_ref_frame_type_t type_vec[] = { VP8_LAST_FRAME, VP8_GOLD_FRAME,
      VP8_ALTR_FRAME };
  for (uint32_t ix = 0;
      ix < sizeof(type_vec) / sizeof(vpx_ref_frame_type_t); ++ix) {
    ref_frame_->frame_type = type_vec[ix];
    if (CopyReference(copy) < 0) {
      delete copy;
      return NULL;
    }
  }
  // Copy all member variables (that are not set in initialization).
  copy->feedback_mode_ = feedback_mode_;
  copy->image_format_ = image_format_;
  copy->last_keyframe_ = last_keyframe_;  // Shallow copy.
  // Allocate memory. (Discard copied _buffer pointer.)
  copy->last_keyframe_._buffer = new uint8_t[last_keyframe_._size];
  memcpy(copy->last_keyframe_._buffer, last_keyframe_._buffer,
         last_keyframe_._length);

  return static_cast<VideoDecoder*>(copy);
}

int VP8DecoderImpl::CopyReference(VP8Decoder* copyTo) {
  // The type of frame to copy should be set in ref_frame_->frame_type
  // before the call to this function.
  if (vpx_codec_control(decoder_, VP8_COPY_REFERENCE, ref_frame_)
      != VPX_CODEC_OK) {
    return -1;
  }
  if (vpx_codec_control(static_cast<VP8DecoderImpl*>(copyTo)->decoder_,
                        VP8_SET_REFERENCE, ref_frame_) != VPX_CODEC_OK) {
    return -1;
  }
  return 0;
}

}  // namespace webrtc