/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>
#include <string.h>

#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/system_wrappers/include/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"

namespace webrtc {

// Maximum number of milliseconds the tests poll for the encode and decode
// callbacks to fire.
enum { kMaxWaitEncTimeMs = 100 };
enum { kMaxWaitDecTimeMs = 25 };

static const uint32_t kTestTimestamp = 123;
static const int64_t kTestNtpTimeMs = 456;

// TODO(mikhal): Replace these with mocks.
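// Copies each encoded image delivered by the encoder into a caller-owned
// EncodedImage and raises a completion flag that the test polls via
// EncodeComplete().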
class Vp8UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback {
 public:
  Vp8UnitTestEncodeCompleteCallback(EncodedImage* frame,
                                    unsigned int decoderSpecificSize,
                                    void* decoderSpecificInfo)
      : encoded_frame_(frame), encode_complete_(false) {}

  virtual int32_t Encoded(const EncodedImage& encoded_frame,
                          const CodecSpecificInfo* codecSpecificInfo,
                          const RTPFragmentationHeader*);
  bool EncodeComplete();

 private:
  EncodedImage* const encoded_frame_;
  rtc::scoped_ptr<uint8_t[]> frame_buffer_;
  bool encode_complete_;
};

int32_t Vp8UnitTestEncodeCompleteCallback::Encoded(
    const EncodedImage& encoded_frame,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentation) {
  if (encoded_frame_->_size < encoded_frame._length) {
    // frame_buffer_ owns the storage behind encoded_frame_->_buffer, so
    // reset() releases any previous allocation; a manual delete[] here would
    // double-free it on reallocation.
    frame_buffer_.reset(new uint8_t[encoded_frame._length]);
    encoded_frame_->_buffer = frame_buffer_.get();
    encoded_frame_->_size = encoded_frame._length;
  }
  memcpy(encoded_frame_->_buffer, encoded_frame._buffer, encoded_frame._length);
  encoded_frame_->_length = encoded_frame._length;
  encoded_frame_->_encodedWidth = encoded_frame._encodedWidth;
  encoded_frame_->_encodedHeight = encoded_frame._encodedHeight;
  encoded_frame_->_timeStamp = encoded_frame._timeStamp;
  encoded_frame_->_frameType = encoded_frame._frameType;
  encoded_frame_->_completeFrame = encoded_frame._completeFrame;
  encode_complete_ = true;
  return 0;
}

bool Vp8UnitTestEncodeCompleteCallback::EncodeComplete() {
  if (encode_complete_) {
    encode_complete_ = false;
    return true;
  }
  return false;
}

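// Copies each decoded frame into a caller-owned VideoFrame and raises a
// completion flag that the test polls via DecodeComplete().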
class Vp8UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback {
 public:
  explicit Vp8UnitTestDecodeCompleteCallback(VideoFrame* frame)
      : decoded_frame_(frame), decode_complete_(false) {}
  int32_t Decoded(VideoFrame& frame) override;
  int32_t Decoded(VideoFrame& frame, int64_t decode_time_ms) override {
    RTC_NOTREACHED();
    return -1;
  }
  bool DecodeComplete();

 private:
  VideoFrame* decoded_frame_;
  bool decode_complete_;
};

bool Vp8UnitTestDecodeCompleteCallback::DecodeComplete() {
  if (decode_complete_) {
    decode_complete_ = false;
    return true;
  }
  return false;
}

int32_t Vp8UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image) {
  decoded_frame_->CopyFrame(image);
  decode_complete_ = true;
  return 0;
}

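// Test fixture: creates a VP8 encoder/decoder pair, reads one frame of raw
// YUV from paris_qcif.yuv into a 172x144 strided I420 input frame, and wires
// up the callbacks above.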
class TestVp8Impl : public ::testing::Test {
 protected:
  virtual void SetUp() {
    encoder_.reset(VP8Encoder::Create());
    decoder_.reset(VP8Decoder::Create());
    memset(&codec_inst_, 0, sizeof(codec_inst_));
    encode_complete_callback_.reset(
        new Vp8UnitTestEncodeCompleteCallback(&encoded_frame_, 0, NULL));
    decode_complete_callback_.reset(
        new Vp8UnitTestDecodeCompleteCallback(&decoded_frame_));
    encoder_->RegisterEncodeCompleteCallback(encode_complete_callback_.get());
    decoder_->RegisterDecodeCompleteCallback(decode_complete_callback_.get());
    // Use a 172x144 input frame so the 16-byte-aligned strides (y: 176,
    // u/v: 96) are larger than the plane widths. Only one frame is processed.
    length_source_frame_ = CalcBufferSize(kI420, kWidth, kHeight);
    source_buffer_.reset(new uint8_t[length_source_frame_]);
    source_file_ = fopen(test::ResourcePath("paris_qcif", "yuv").c_str(), "rb");
    ASSERT_TRUE(source_file_ != NULL);
    // Read one frame from the source file.
    ASSERT_EQ(
        fread(source_buffer_.get(), 1, length_source_frame_, source_file_),
        length_source_frame_);
    codec_inst_.width = kWidth;
    codec_inst_.height = kHeight;
    const int kFramerate = 30;
    codec_inst_.maxFramerate = kFramerate;
    // Compute the 16-byte-aligned stride values.
    int stride_uv = 0;
    int stride_y = 0;
    Calc16ByteAlignedStride(codec_inst_.width, &stride_y, &stride_uv);
    EXPECT_EQ(stride_y, 176);
    EXPECT_EQ(stride_uv, 96);

    input_frame_.CreateEmptyFrame(codec_inst_.width, codec_inst_.height,
                                  stride_y, stride_uv, stride_uv);
    input_frame_.set_timestamp(kTestTimestamp);
    // Use ConvertToI420 to copy the source buffer into the strided frame.
    EXPECT_EQ(0, ConvertToI420(kI420, source_buffer_.get(), 0, 0,
                               codec_inst_.width, codec_inst_.height, 0,
                               kVideoRotation_0, &input_frame_));
  }

  virtual void TearDown() {
    if (source_file_ != NULL)
      fclose(source_file_);
  }

  void SetUpEncodeDecode() {
    codec_inst_.startBitrate = 300;
    codec_inst_.maxBitrate = 4000;
    codec_inst_.qpMax = 56;
    codec_inst_.codecSpecific.VP8.denoisingOn = true;

    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
              encoder_->InitEncode(&codec_inst_, 1, 1440));
    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
  }

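  // Polls the encode callback for up to kMaxWaitEncTimeMs and returns the
  // length of the encoded frame, or 0 on timeout.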
  size_t WaitForEncodedFrame() const {
    int64_t start_time = TickTime::MillisecondTimestamp();
    while (TickTime::MillisecondTimestamp() - start_time < kMaxWaitEncTimeMs) {
      if (encode_complete_callback_->EncodeComplete()) {
        return encoded_frame_._length;
      }
    }
    return 0;
  }

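  // Polls the decode callback for up to kMaxWaitDecTimeMs and returns the
  // decoded frame's I420 buffer size, or 0 on timeout.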
  size_t WaitForDecodedFrame() const {
    int64_t start_time = TickTime::MillisecondTimestamp();
    while (TickTime::MillisecondTimestamp() - start_time < kMaxWaitDecTimeMs) {
      if (decode_complete_callback_->DecodeComplete()) {
        return CalcBufferSize(kI420, decoded_frame_.width(),
                              decoded_frame_.height());
      }
    }
    return 0;
  }

  const int kWidth = 172;
  const int kHeight = 144;

  rtc::scoped_ptr<Vp8UnitTestEncodeCompleteCallback> encode_complete_callback_;
  rtc::scoped_ptr<Vp8UnitTestDecodeCompleteCallback> decode_complete_callback_;
  rtc::scoped_ptr<uint8_t[]> source_buffer_;
  FILE* source_file_;
  VideoFrame input_frame_;
  rtc::scoped_ptr<VideoEncoder> encoder_;
  rtc::scoped_ptr<VideoDecoder> decoder_;
  EncodedImage encoded_frame_;
  VideoFrame decoded_frame_;
  size_t length_source_frame_;
  VideoCodec codec_inst_;
};

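// Checks that SetRates() fails before InitEncode() and that the encoder and
// decoder initialize cleanly with the configured parameters.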
TEST_F(TestVp8Impl, EncoderParameterTest) {
  strncpy(codec_inst_.plName, "VP8", 31);
  codec_inst_.plType = 126;
  codec_inst_.maxBitrate = 0;
  codec_inst_.minBitrate = 0;
  codec_inst_.width = 1440;
  codec_inst_.height = 1080;
  codec_inst_.maxFramerate = 30;
  codec_inst_.startBitrate = 300;
  codec_inst_.qpMax = 56;
  codec_inst_.codecSpecific.VP8.complexity = kComplexityNormal;
  codec_inst_.codecSpecific.VP8.numberOfTemporalLayers = 1;
  // Calls before InitEncode().
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  int bit_rate = 300;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->SetRates(bit_rate, codec_inst_.maxFramerate));

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->InitEncode(&codec_inst_, 1, 1440));

  // Decoder parameter tests.
  // Calls before InitDecode().
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->InitDecode(&codec_inst_, 1));
}

#if defined(WEBRTC_ANDROID)
#define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode
#else
#define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode
#endif
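// Encodes one frame, decodes it, and checks that the timestamps survive the
// round trip and that the PSNR against the strided input exceeds 36 dB.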
TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
  SetUpEncodeDecode();
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame_, NULL, NULL));
  EXPECT_GT(WaitForEncodedFrame(), 0u);
  // The first encoded frame should be a key frame; mark it explicitly before
  // decoding.
  encoded_frame_._frameType = kVideoFrameKey;
  encoded_frame_.ntp_time_ms_ = kTestNtpTimeMs;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            decoder_->Decode(encoded_frame_, false, NULL));
  EXPECT_GT(WaitForDecodedFrame(), 0u);
  // Compute PSNR on all planes (faster than SSIM).
  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
  EXPECT_EQ(kTestTimestamp, decoded_frame_.timestamp());
  EXPECT_EQ(kTestNtpTimeMs, decoded_frame_.ntp_time_ms());
}

#if defined(WEBRTC_ANDROID)
#define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame
#else
#define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame
#endif
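// Verifies that the decoder rejects an incomplete frame and a delta frame
// before it has seen a key frame, but accepts a complete key frame.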
TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
  SetUpEncodeDecode();
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame_, NULL, NULL));
  EXPECT_GT(WaitForEncodedFrame(), 0u);
  // Marking the frame incomplete should make Decode() return an error.
  encoded_frame_._completeFrame = false;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
            decoder_->Decode(encoded_frame_, false, NULL));
  // Restore completeness but force a delta frame: still an error without a
  // preceding key frame.
  encoded_frame_._frameType = kVideoFrameDelta;
  encoded_frame_._completeFrame = true;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
            decoder_->Decode(encoded_frame_, false, NULL));
  // A complete key frame should decode successfully.
  encoded_frame_._frameType = kVideoFrameKey;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            decoder_->Decode(encoded_frame_, false, NULL));
  EXPECT_GT(I420PSNR(&input_frame_, &decoded_frame_), 36);
}

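// Decodes the same key frame before and after Reset() and expects bit-exact
// output from both decodes.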
TEST_F(TestVp8Impl, TestReset) {
  SetUpEncodeDecode();
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(input_frame_, NULL, NULL));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            decoder_->Decode(encoded_frame_, false, NULL));
  size_t length = CalcBufferSize(kI420, kWidth, kHeight);
  rtc::scoped_ptr<uint8_t[]> first_frame_buffer(new uint8_t[length]);
  ExtractBuffer(decoded_frame_, length, first_frame_buffer.get());

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Reset());

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            decoder_->Decode(encoded_frame_, false, NULL));
  rtc::scoped_ptr<uint8_t[]> second_frame_buffer(new uint8_t[length]);
  ExtractBuffer(decoded_frame_, length, second_frame_buffer.get());

  EXPECT_EQ(
      0, memcmp(second_frame_buffer.get(), first_frame_buffer.get(), length));
}

}  // namespace webrtc