1/*
2 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
3 *
4 *  Use of this source code is governed by a BSD-style license
5 *  that can be found in the LICENSE file in the root of the source
6 *  tree. An additional intellectual property rights grant can be found
7 *  in the file PATENTS.  All contributing project authors may
8 *  be found in the AUTHORS file in the root of the source tree.
9 */
10#include "webrtc/video/video_capture_input.h"
11
12#include <vector>
13
14#include "testing/gmock/include/gmock/gmock.h"
15#include "testing/gtest/include/gtest/gtest.h"
16#include "webrtc/base/event.h"
17#include "webrtc/base/scoped_ptr.h"
18#include "webrtc/common.h"
19#include "webrtc/modules/utility/include/mock/mock_process_thread.h"
20#include "webrtc/system_wrappers/include/critical_section_wrapper.h"
21#include "webrtc/system_wrappers/include/ref_count.h"
22#include "webrtc/system_wrappers/include/scoped_vector.h"
23#include "webrtc/test/fake_texture_frame.h"
24#include "webrtc/video/send_statistics_proxy.h"
25
26using ::testing::_;
27using ::testing::Invoke;
28using ::testing::NiceMock;
29using ::testing::Return;
30using ::testing::WithArg;
31
32// If an output frame does not arrive in 500ms, the test will fail.
33#define FRAME_TIMEOUT_MS 500
34
35namespace webrtc {
36
// Mock of the callback that VideoCaptureInput delivers processed frames to.
// The fixture below wires DeliverFrame up to collect the output frames.
class MockVideoCaptureCallback : public VideoCaptureCallback {
 public:
  MOCK_METHOD1(DeliverFrame, void(VideoFrame video_frame));
};
41
// Frame-comparison helpers, declared up front so the tests can use them;
// definitions are at the bottom of the file.
bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2);
bool EqualFramesVector(const ScopedVector<VideoFrame>& frames1,
                       const ScopedVector<VideoFrame>& frames2);
// Allocates a 36x24 I420 frame whose planes are filled with |length|.
VideoFrame* CreateVideoFrame(uint8_t length);
48
49class VideoCaptureInputTest : public ::testing::Test {
50 protected:
51  VideoCaptureInputTest()
52      : mock_process_thread_(new NiceMock<MockProcessThread>),
53        mock_frame_callback_(new NiceMock<MockVideoCaptureCallback>),
54        output_frame_event_(false, false),
55        stats_proxy_(Clock::GetRealTimeClock(),
56                     webrtc::VideoSendStream::Config(nullptr),
57                     webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo) {}
58
59  virtual void SetUp() {
60    EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_))
61        .WillRepeatedly(
62            WithArg<0>(Invoke(this, &VideoCaptureInputTest::AddOutputFrame)));
63
64    Config config;
65    input_.reset(new internal::VideoCaptureInput(
66        mock_process_thread_.get(), mock_frame_callback_.get(), nullptr,
67        &stats_proxy_, nullptr, nullptr));
68  }
69
70  virtual void TearDown() {
71    // VideoCaptureInput accesses |mock_process_thread_| in destructor and
72    // should
73    // be deleted first.
74    input_.reset();
75  }
76
77  void AddInputFrame(VideoFrame* frame) {
78    input_->IncomingCapturedFrame(*frame);
79  }
80
81  void AddOutputFrame(const VideoFrame& frame) {
82    if (frame.native_handle() == NULL)
83      output_frame_ybuffers_.push_back(frame.buffer(kYPlane));
84    output_frames_.push_back(new VideoFrame(frame));
85    output_frame_event_.Set();
86  }
87
88  void WaitOutputFrame() {
89    EXPECT_TRUE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));
90  }
91
92  rtc::scoped_ptr<MockProcessThread> mock_process_thread_;
93  rtc::scoped_ptr<MockVideoCaptureCallback> mock_frame_callback_;
94
95  // Used to send input capture frames to VideoCaptureInput.
96  rtc::scoped_ptr<internal::VideoCaptureInput> input_;
97
98  // Input capture frames of VideoCaptureInput.
99  ScopedVector<VideoFrame> input_frames_;
100
101  // Indicate an output frame has arrived.
102  rtc::Event output_frame_event_;
103
104  // Output delivered frames of VideoCaptureInput.
105  ScopedVector<VideoFrame> output_frames_;
106
107  // The pointers of Y plane buffers of output frames. This is used to verify
108  // the frame are swapped and not copied.
109  std::vector<const uint8_t*> output_frame_ybuffers_;
110  SendStatisticsProxy stats_proxy_;
111};
112
TEST_F(VideoCaptureInputTest, DoesNotRetainHandleNorCopyBuffer) {
  // Signalled from TestBuffer's destructor, i.e. when the last reference to
  // the buffer is dropped.
  rtc::Event frame_destroyed_event(false, false);
  // I420 buffer that reports its own destruction through |event_|.
  class TestBuffer : public webrtc::I420Buffer {
   public:
    explicit TestBuffer(rtc::Event* event) : I420Buffer(5, 5), event_(event) {}

   private:
    friend class rtc::RefCountedObject<TestBuffer>;
    ~TestBuffer() override { event_->Set(); }
    rtc::Event* const event_;
  };

  VideoFrame frame(
      new rtc::RefCountedObject<TestBuffer>(&frame_destroyed_event), 1, 1,
      kVideoRotation_0);

  AddInputFrame(&frame);
  WaitOutputFrame();

  // The delivered frame must share the input's buffer (no deep copy).
  EXPECT_EQ(output_frames_[0]->video_frame_buffer().get(),
            frame.video_frame_buffer().get());
  // Releasing both references must destroy the buffer, proving
  // VideoCaptureInput kept no hidden reference of its own.
  output_frames_.clear();
  frame.Reset();
  EXPECT_TRUE(frame_destroyed_event.Wait(FRAME_TIMEOUT_MS));
}
139
// A frame with a render time but a zero NTP time should get an NTP timestamp
// assigned on delivery; here it must at least exceed the (small) render time.
TEST_F(VideoCaptureInputTest, TestNtpTimeStampSetIfRenderTimeSet) {
  input_frames_.push_back(CreateVideoFrame(0));
  input_frames_[0]->set_render_time_ms(5);
  input_frames_[0]->set_ntp_time_ms(0);

  AddInputFrame(input_frames_[0]);
  WaitOutputFrame();
  EXPECT_GT(output_frames_[0]->ntp_time_ms(),
            input_frames_[0]->render_time_ms());
}
150
// The RTP timestamp of a delivered frame is derived from its NTP time using
// the 90 kHz video clock (ntp_ms * 90).
TEST_F(VideoCaptureInputTest, TestRtpTimeStampSet) {
  input_frames_.push_back(CreateVideoFrame(0));
  input_frames_[0]->set_render_time_ms(0);
  input_frames_[0]->set_ntp_time_ms(1);
  input_frames_[0]->set_timestamp(0);

  AddInputFrame(input_frames_[0]);
  WaitOutputFrame();
  EXPECT_EQ(output_frames_[0]->timestamp(),
            input_frames_[0]->ntp_time_ms() * 90);
}
162
// Frames whose NTP timestamp does not advance must be dropped; only frames
// with a strictly increasing NTP time are delivered.
TEST_F(VideoCaptureInputTest, DropsFramesWithSameOrOldNtpTimestamp) {
  input_frames_.push_back(CreateVideoFrame(0));

  input_frames_[0]->set_ntp_time_ms(17);
  AddInputFrame(input_frames_[0]);
  WaitOutputFrame();
  EXPECT_EQ(output_frames_[0]->timestamp(),
            input_frames_[0]->ntp_time_ms() * 90);

  // Repeat frame with the same NTP timestamp should drop.
  // (Each negative check deliberately waits the full FRAME_TIMEOUT_MS.)
  AddInputFrame(input_frames_[0]);
  EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));

  // As should frames with a decreased NTP timestamp.
  input_frames_[0]->set_ntp_time_ms(input_frames_[0]->ntp_time_ms() - 1);
  AddInputFrame(input_frames_[0]);
  EXPECT_FALSE(output_frame_event_.Wait(FRAME_TIMEOUT_MS));

  // But delivering with an increased NTP timestamp should succeed.
  input_frames_[0]->set_ntp_time_ms(4711);
  AddInputFrame(input_frames_[0]);
  WaitOutputFrame();
  EXPECT_EQ(output_frames_[1]->timestamp(),
            input_frames_[0]->ntp_time_ms() * 90);
}
188
// Texture (native-handle) frames must pass through with their handle intact.
TEST_F(VideoCaptureInputTest, TestTextureFrames) {
  const int kNumFrame = 3;
  for (int i = 0 ; i < kNumFrame; ++i) {
    test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
    // Add one to |i| so that width/height > 0.
    input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
        dummy_handle, i + 1, i + 1, i + 1, i + 1, webrtc::kVideoRotation_0)));
    AddInputFrame(input_frames_[i]);
    WaitOutputFrame();
    // The delivered frame must carry the exact same handle, not a copy.
    EXPECT_EQ(dummy_handle, output_frames_[i]->native_handle());
  }

  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
}
203
// Buffer-backed I420 frames must be delivered unchanged and without copying
// the pixel data (verified via the Y-plane pointers).
TEST_F(VideoCaptureInputTest, TestI420Frames) {
  const int kNumFrame = 4;
  std::vector<const uint8_t*> ybuffer_pointers;
  for (int i = 0; i < kNumFrame; ++i) {
    input_frames_.push_back(CreateVideoFrame(static_cast<uint8_t>(i + 1)));
    const VideoFrame* const_input_frame = input_frames_[i];
    ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane));
    AddInputFrame(input_frames_[i]);
    WaitOutputFrame();
  }

  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
  // Make sure the buffer is not copied.
  for (int i = 0; i < kNumFrame; ++i)
    EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
}
220
// Switching from a texture frame to an I420 frame mid-stream must work.
TEST_F(VideoCaptureInputTest, TestI420FrameAfterTextureFrame) {
  test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
  input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
      dummy_handle, 1, 1, 1, 1, webrtc::kVideoRotation_0)));
  AddInputFrame(input_frames_[0]);
  WaitOutputFrame();
  EXPECT_EQ(dummy_handle, output_frames_[0]->native_handle());

  input_frames_.push_back(CreateVideoFrame(2));
  AddInputFrame(input_frames_[1]);
  WaitOutputFrame();

  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
}
235
// Switching from an I420 frame to a texture frame mid-stream must work.
TEST_F(VideoCaptureInputTest, TestTextureFrameAfterI420Frame) {
  input_frames_.push_back(CreateVideoFrame(1));
  AddInputFrame(input_frames_[0]);
  WaitOutputFrame();

  test::FakeNativeHandle* dummy_handle = new test::FakeNativeHandle();
  input_frames_.push_back(new VideoFrame(test::FakeNativeHandle::CreateFrame(
      dummy_handle, 1, 1, 2, 2, webrtc::kVideoRotation_0)));
  AddInputFrame(input_frames_[1]);
  WaitOutputFrame();

  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
}
249
250bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
251  if (frame1.native_handle() != NULL || frame2.native_handle() != NULL)
252    return EqualTextureFrames(frame1, frame2);
253  return EqualBufferFrames(frame1, frame2);
254}
255
256bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
257  return ((frame1.native_handle() == frame2.native_handle()) &&
258          (frame1.width() == frame2.width()) &&
259          (frame1.height() == frame2.height()) &&
260          (frame1.render_time_ms() == frame2.render_time_ms()));
261}
262
263bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2) {
264  return ((frame1.width() == frame2.width()) &&
265          (frame1.height() == frame2.height()) &&
266          (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
267          (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
268          (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
269          (frame1.render_time_ms() == frame2.render_time_ms()) &&
270          (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
271          (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
272          (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
273          (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
274                  frame1.allocated_size(kYPlane)) == 0) &&
275          (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
276                  frame1.allocated_size(kUPlane)) == 0) &&
277          (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
278                  frame1.allocated_size(kVPlane)) == 0));
279}
280
281bool EqualFramesVector(const ScopedVector<VideoFrame>& frames1,
282                       const ScopedVector<VideoFrame>& frames2) {
283  if (frames1.size() != frames2.size())
284    return false;
285  for (size_t i = 0; i < frames1.size(); ++i) {
286    if (!EqualFrames(*frames1[i], *frames2[i]))
287      return false;
288  }
289  return true;
290}
291
292VideoFrame* CreateVideoFrame(uint8_t data) {
293  VideoFrame* frame = new VideoFrame();
294  const int width = 36;
295  const int height = 24;
296  const int kSizeY = width * height * 2;
297  uint8_t buffer[kSizeY];
298  memset(buffer, data, kSizeY);
299  frame->CreateFrame(buffer, buffer, buffer, width, height, width, width / 2,
300                     width / 2);
301  frame->set_render_time_ms(data);
302  return frame;
303}
304
305}  // namespace webrtc
306