/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>

#include <algorithm>
#include <deque>
#include <map>
#include <sstream>
#include <string>
#include <vector>

#include "testing/gtest/include/gtest/gtest.h"

#include "webrtc/base/checks.h"
#include "webrtc/base/event.h"
#include "webrtc/base/format_macros.h"
#include "webrtc/base/scoped_ptr.h"
#include "webrtc/call.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/include/cpu_info.h"
#include "webrtc/test/layer_filtering_transport.h"
#include "webrtc/test/run_loop.h"
#include "webrtc/test/statistics.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/video_renderer.h"
#include "webrtc/video/video_quality_test.h"

namespace webrtc {

static const int kSendStatsPollingIntervalMs = 1000;
static const int kPayloadTypeVP8 = 123;
static const int kPayloadTypeVP9 = 124;

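// VideoAnalyzer taps the capture, transport and render paths of the call. It
// matches each rendered frame to the captured frame with the same
// (delta-adjusted) RTP timestamp, queues frame pairs for comparison, and
// computes PSNR/SSIM plus send/receive/render timing statistics on a small
// thread pool.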
class VideoAnalyzer : public PacketReceiver,
                      public Transport,
                      public VideoRenderer,
                      public VideoCaptureInput,
                      public EncodedFrameObserver,
                      public EncodingTimeObserver {
 public:
  VideoAnalyzer(test::LayerFilteringTransport* transport,
                const std::string& test_label,
                double avg_psnr_threshold,
                double avg_ssim_threshold,
                int duration_frames,
                FILE* graph_data_output_file,
                const std::string& graph_title,
                uint32_t ssrc_to_analyze)
      : input_(nullptr),
        transport_(transport),
        receiver_(nullptr),
        send_stream_(nullptr),
        test_label_(test_label),
        graph_data_output_file_(graph_data_output_file),
        graph_title_(graph_title),
        ssrc_to_analyze_(ssrc_to_analyze),
        frames_to_process_(duration_frames),
        frames_recorded_(0),
        frames_processed_(0),
        dropped_frames_(0),
        last_render_time_(0),
        rtp_timestamp_delta_(0),
        avg_psnr_threshold_(avg_psnr_threshold),
        avg_ssim_threshold_(avg_ssim_threshold),
        stats_polling_thread_(&PollStatsThread, this, "StatsPoller"),
        comparison_available_event_(false, false),
        done_(false, false) {
    // Create thread pool for CPU-expensive PSNR/SSIM calculations.

    // Try to use about as many threads as there are cores, but leave
    // kMinCoresLeft cores free so that we don't accidentally starve "real"
    // worker threads (codec etc). Also, don't allocate more than
    // kMaxComparisonThreads, even if there are spare cores.

    uint32_t num_cores = CpuInfo::DetectNumberOfCores();
    RTC_DCHECK_GE(num_cores, 1u);
    static const uint32_t kMinCoresLeft = 4;
    static const uint32_t kMaxComparisonThreads = 8;

    if (num_cores <= kMinCoresLeft) {
      num_cores = 1;
    } else {
      num_cores -= kMinCoresLeft;
      num_cores = std::min(num_cores, kMaxComparisonThreads);
    }

    for (uint32_t i = 0; i < num_cores; ++i) {
      rtc::PlatformThread* thread =
          new rtc::PlatformThread(&FrameComparisonThread, this, "Analyzer");
      thread->Start();
      comparison_thread_pool_.push_back(thread);
    }
  }

  ~VideoAnalyzer() {
    for (rtc::PlatformThread* thread : comparison_thread_pool_) {
      thread->Stop();
      delete thread;
    }
  }

  virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }

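  // PacketReceiver implementation. Records the receive time of every incoming
  // RTP packet, keyed on the send-side RTP timestamp, before handing the
  // packet on to the real receiver.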
  DeliveryStatus DeliverPacket(MediaType media_type,
                               const uint8_t* packet,
                               size_t length,
                               const PacketTime& packet_time) override {
    RtpUtility::RtpHeaderParser parser(packet, length);
    RTPHeader header;
    parser.Parse(&header);
    {
      rtc::CritScope lock(&crit_);
      recv_times_[header.timestamp - rtp_timestamp_delta_] =
          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
    }

    return receiver_->DeliverPacket(media_type, packet, length, packet_time);
  }

  // EncodingTimeObserver.
  void OnReportEncodedTime(int64_t ntp_time_ms, int encode_time_ms) override {
    rtc::CritScope crit(&comparison_lock_);
    samples_encode_time_ms_[ntp_time_ms] = encode_time_ms;
  }

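  // VideoCaptureInput implementation. Stores a copy of every captured frame,
  // stamped with a 90 kHz RTP timestamp derived from its NTP capture time, so
  // rendered frames can later be matched against it.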
  void IncomingCapturedFrame(const VideoFrame& video_frame) override {
    VideoFrame copy = video_frame;
    copy.set_timestamp(copy.ntp_time_ms() * 90);

    {
      rtc::CritScope lock(&crit_);
      if (first_send_frame_.IsZeroSize() && rtp_timestamp_delta_ == 0)
        first_send_frame_ = copy;

      frames_.push_back(copy);
    }

    input_->IncomingCapturedFrame(video_frame);
  }

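  // Transport implementation for the send side. The first packet establishes
  // the delta between RTP timestamps and captured-frame timestamps; every
  // packet's send time is recorded, and for the analyzed SSRC the encoded
  // payload size of the corresponding frame is accumulated.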
  bool SendRtp(const uint8_t* packet,
               size_t length,
               const PacketOptions& options) override {
    RtpUtility::RtpHeaderParser parser(packet, length);
    RTPHeader header;
    parser.Parse(&header);

    int64_t current_time =
        Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
    bool result = transport_->SendRtp(packet, length, options);
    {
      rtc::CritScope lock(&crit_);
      if (rtp_timestamp_delta_ == 0) {
        rtp_timestamp_delta_ = header.timestamp - first_send_frame_.timestamp();
        first_send_frame_.Reset();
      }
      uint32_t timestamp = header.timestamp - rtp_timestamp_delta_;
      send_times_[timestamp] = current_time;
      if (!transport_->DiscardedLastPacket() &&
          header.ssrc == ssrc_to_analyze_) {
        encoded_frame_sizes_[timestamp] +=
            length - (header.headerLength + header.paddingLength);
      }
    }
    return result;
  }

  bool SendRtcp(const uint8_t* packet, size_t length) override {
    return transport_->SendRtcp(packet, length);
  }

  void EncodedFrameCallback(const EncodedFrame& frame) override {
    rtc::CritScope lock(&comparison_lock_);
    if (frames_recorded_ < frames_to_process_)
      encoded_frame_size_.AddSample(frame.length_);
  }

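  // VideoRenderer implementation. Captured frames older than the rendered
  // frame's send timestamp are counted as dropped; the matching captured frame
  // is then queued for comparison against the rendered frame.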
  void RenderFrame(const VideoFrame& video_frame,
                   int time_to_render_ms) override {
    int64_t render_time_ms =
        Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
    uint32_t send_timestamp = video_frame.timestamp() - rtp_timestamp_delta_;

    rtc::CritScope lock(&crit_);

    while (frames_.front().timestamp() < send_timestamp) {
      AddFrameComparison(frames_.front(), last_rendered_frame_, true,
                         render_time_ms);
      frames_.pop_front();
    }

    VideoFrame reference_frame = frames_.front();
    frames_.pop_front();
    assert(!reference_frame.IsZeroSize());
    if (send_timestamp == reference_frame.timestamp() - 1) {
      // TODO(ivica): Make this work for > 2 streams.
      // Look at rtp_sender.c:RTPSender::BuildRTPHeader.
      ++send_timestamp;
    }
    EXPECT_EQ(reference_frame.timestamp(), send_timestamp);
    assert(reference_frame.timestamp() == send_timestamp);

    AddFrameComparison(reference_frame, video_frame, false, render_time_ms);

    last_rendered_frame_ = video_frame;
  }

  bool IsTextureSupported() const override { return false; }

  void Wait() {
    // Frame comparisons can be very expensive. Wait for test to be done, but
    // at time-out check if frames_processed is going up. If so, give it more
    // time, otherwise fail. Hopefully this will reduce test flakiness.

    stats_polling_thread_.Start();

    int last_frames_processed = -1;
    int iteration = 0;
    while (!done_.Wait(VideoQualityTest::kDefaultTimeoutMs)) {
      int frames_processed;
      {
        rtc::CritScope crit(&comparison_lock_);
        frames_processed = frames_processed_;
      }

      // Print some output so test infrastructure won't think we've crashed.
      const char* kKeepAliveMessages[3] = {
          "Uh, I'm-I'm not quite dead, sir.",
          "Uh, I-I think uh, I could pull through, sir.",
          "Actually, I think I'm all right to come with you--"};
      printf("- %s\n", kKeepAliveMessages[iteration++ % 3]);

      if (last_frames_processed == -1) {
        last_frames_processed = frames_processed;
        continue;
      }
      ASSERT_GT(frames_processed, last_frames_processed)
          << "Analyzer stalled while waiting for test to finish.";
      last_frames_processed = frames_processed;
    }

    if (iteration > 0)
      printf("- Farewell, sweet Concorde!\n");

    // Signal stats polling thread if that is still waiting and stop it now,
    // since it uses the send_stream_ reference that might be reclaimed after
    // returning from this method.
    done_.Set();
    stats_polling_thread_.Stop();
  }

  VideoCaptureInput* input_;
  test::LayerFilteringTransport* const transport_;
  PacketReceiver* receiver_;
  VideoSendStream* send_stream_;

 private:
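  // A reference (captured) frame and the corresponding rendered frame,
  // together with the timing and size data collected for that frame.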
  struct FrameComparison {
    FrameComparison()
        : dropped(false),
          send_time_ms(0),
          recv_time_ms(0),
          render_time_ms(0),
          encoded_frame_size(0) {}

    FrameComparison(const VideoFrame& reference,
                    const VideoFrame& render,
                    bool dropped,
                    int64_t send_time_ms,
                    int64_t recv_time_ms,
                    int64_t render_time_ms,
                    size_t encoded_frame_size)
        : reference(reference),
          render(render),
          dropped(dropped),
          send_time_ms(send_time_ms),
          recv_time_ms(recv_time_ms),
          render_time_ms(render_time_ms),
          encoded_frame_size(encoded_frame_size) {}

    VideoFrame reference;
    VideoFrame render;
    bool dropped;
    int64_t send_time_ms;
    int64_t recv_time_ms;
    int64_t render_time_ms;
    size_t encoded_frame_size;
  };

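  // One row of the optional graph data output: per-frame drop flag, timing,
  // encoded size and PSNR/SSIM values, as written by PrintSamplesToFile().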
  struct Sample {
    Sample(int dropped,
           int64_t input_time_ms,
           int64_t send_time_ms,
           int64_t recv_time_ms,
           int64_t render_time_ms,
           size_t encoded_frame_size,
           double psnr,
           double ssim)
        : dropped(dropped),
          input_time_ms(input_time_ms),
          send_time_ms(send_time_ms),
          recv_time_ms(recv_time_ms),
          render_time_ms(render_time_ms),
          encoded_frame_size(encoded_frame_size),
          psnr(psnr),
          ssim(ssim) {}

    int dropped;
    int64_t input_time_ms;
    int64_t send_time_ms;
    int64_t recv_time_ms;
    int64_t render_time_ms;
    size_t encoded_frame_size;
    double psnr;
    double ssim;
  };

  void AddFrameComparison(const VideoFrame& reference,
                          const VideoFrame& render,
                          bool dropped,
                          int64_t render_time_ms)
      EXCLUSIVE_LOCKS_REQUIRED(crit_) {
    int64_t send_time_ms = send_times_[reference.timestamp()];
    send_times_.erase(reference.timestamp());
    int64_t recv_time_ms = recv_times_[reference.timestamp()];
    recv_times_.erase(reference.timestamp());

    // TODO(ivica): Make this work for > 2 streams.
    auto it = encoded_frame_sizes_.find(reference.timestamp());
    if (it == encoded_frame_sizes_.end())
      it = encoded_frame_sizes_.find(reference.timestamp() - 1);
    size_t encoded_size = it == encoded_frame_sizes_.end() ? 0 : it->second;
    if (it != encoded_frame_sizes_.end())
      encoded_frame_sizes_.erase(it);

    VideoFrame reference_copy;
    VideoFrame render_copy;
    reference_copy.CopyFrame(reference);
    render_copy.CopyFrame(render);

    rtc::CritScope crit(&comparison_lock_);
    comparisons_.push_back(FrameComparison(reference_copy, render_copy, dropped,
                                           send_time_ms, recv_time_ms,
                                           render_time_ms, encoded_size));
    comparison_available_event_.Set();
  }

  static bool PollStatsThread(void* obj) {
    return static_cast<VideoAnalyzer*>(obj)->PollStats();
  }

  bool PollStats() {
    if (done_.Wait(kSendStatsPollingIntervalMs)) {
      // Set event again to make sure main thread is also signaled, then we're
      // done.
      done_.Set();
      return false;
    }

    VideoSendStream::Stats stats = send_stream_->GetStats();

    rtc::CritScope crit(&comparison_lock_);
    encode_frame_rate_.AddSample(stats.encode_frame_rate);
    encode_time_ms.AddSample(stats.avg_encode_time_ms);
    encode_usage_percent.AddSample(stats.encode_usage_percent);
    media_bitrate_bps.AddSample(stats.media_bitrate_bps);

    return true;
  }

  static bool FrameComparisonThread(void* obj) {
    return static_cast<VideoAnalyzer*>(obj)->CompareFrames();
  }

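  // Worker loop run by each comparison thread: pops queued frame pairs,
  // computes their metrics, and signals done_ once the last frame has been
  // processed. Returning false terminates the calling thread.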
  bool CompareFrames() {
    if (AllFramesRecorded())
      return false;

    VideoFrame reference;
    VideoFrame render;
    FrameComparison comparison;

    if (!PopComparison(&comparison)) {
      // Wait until new comparison task is available, or test is done.
      // If done, wake up remaining threads waiting.
      comparison_available_event_.Wait(1000);
      if (AllFramesRecorded()) {
        comparison_available_event_.Set();
        return false;
      }
      return true;  // Try again.
    }

    PerformFrameComparison(comparison);

    if (FrameProcessed()) {
      PrintResults();
      if (graph_data_output_file_)
        PrintSamplesToFile();
      done_.Set();
      comparison_available_event_.Set();
      return false;
    }

    return true;
  }

  bool PopComparison(FrameComparison* comparison) {
    rtc::CritScope crit(&comparison_lock_);
    // If AllFramesRecorded() is true, it means we have already popped
    // frames_to_process_ frames from comparisons_, so there is no more work
    // for this thread to be done. frames_processed_ might still be lower if
    // all comparisons are not done, but those frames are currently being
    // worked on by other threads.
    if (comparisons_.empty() || AllFramesRecorded())
      return false;

    *comparison = comparisons_.front();
    comparisons_.pop_front();

    FrameRecorded();
    return true;
  }

  // Increment counter for number of frames received for comparison.
  void FrameRecorded() {
    rtc::CritScope crit(&comparison_lock_);
    ++frames_recorded_;
  }

  // Returns true if all frames to be compared have been taken from the queue.
  bool AllFramesRecorded() {
    rtc::CritScope crit(&comparison_lock_);
    assert(frames_recorded_ <= frames_to_process_);
    return frames_recorded_ == frames_to_process_;
  }

  // Increase count of number of frames processed. Returns true if this was the
  // last frame to be processed.
  bool FrameProcessed() {
    rtc::CritScope crit(&comparison_lock_);
    ++frames_processed_;
    assert(frames_processed_ <= frames_to_process_);
    return frames_processed_ == frames_to_process_;
  }

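  // Prints one "RESULT <metric>: <test_label> = {mean, stddev}<unit>" line per
  // collected statistic and checks the PSNR/SSIM means against the configured
  // thresholds.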
  void PrintResults() {
    rtc::CritScope crit(&comparison_lock_);
    PrintResult("psnr", psnr_, " dB");
    PrintResult("ssim", ssim_, "");
    PrintResult("sender_time", sender_time_, " ms");
    printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
           dropped_frames_);
    PrintResult("receiver_time", receiver_time_, " ms");
    PrintResult("total_delay_incl_network", end_to_end_, " ms");
    PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
    PrintResult("encoded_frame_size", encoded_frame_size_, " bytes");
    PrintResult("encode_frame_rate", encode_frame_rate_, " fps");
    PrintResult("encode_time", encode_time_ms, " ms");
    PrintResult("encode_usage_percent", encode_usage_percent, " percent");
    PrintResult("media_bitrate", media_bitrate_bps, " bps");

    EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
    EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
  }

  void PerformFrameComparison(const FrameComparison& comparison) {
    // Perform expensive psnr and ssim calculations while not holding lock.
    double psnr = I420PSNR(&comparison.reference, &comparison.render);
    double ssim = I420SSIM(&comparison.reference, &comparison.render);

    int64_t input_time_ms = comparison.reference.ntp_time_ms();

    rtc::CritScope crit(&comparison_lock_);
    if (graph_data_output_file_) {
      samples_.push_back(
          Sample(comparison.dropped, input_time_ms, comparison.send_time_ms,
                 comparison.recv_time_ms, comparison.render_time_ms,
                 comparison.encoded_frame_size, psnr, ssim));
    }
    psnr_.AddSample(psnr);
    ssim_.AddSample(ssim);

    if (comparison.dropped) {
      ++dropped_frames_;
      return;
    }
    if (last_render_time_ != 0)
      rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_);
    last_render_time_ = comparison.render_time_ms;

    sender_time_.AddSample(comparison.send_time_ms - input_time_ms);
    receiver_time_.AddSample(comparison.render_time_ms -
                             comparison.recv_time_ms);
    end_to_end_.AddSample(comparison.render_time_ms - input_time_ms);
    encoded_frame_size_.AddSample(comparison.encoded_frame_size);
  }

  void PrintResult(const char* result_type,
                   test::Statistics stats,
                   const char* unit) {
    printf("RESULT %s: %s = {%f, %f}%s\n",
           result_type,
           test_label_.c_str(),
           stats.Mean(),
           stats.StandardDeviation(),
           unit);
  }

  void PrintSamplesToFile(void) {
    FILE* out = graph_data_output_file_;
    rtc::CritScope crit(&comparison_lock_);
    std::sort(samples_.begin(), samples_.end(),
              [](const Sample& A, const Sample& B) -> bool {
                return A.input_time_ms < B.input_time_ms;
              });

    fprintf(out, "%s\n", graph_title_.c_str());
    fprintf(out, "%" PRIuS "\n", samples_.size());
    fprintf(out,
            "dropped "
            "input_time_ms "
            "send_time_ms "
            "recv_time_ms "
            "render_time_ms "
            "encoded_frame_size "
            "psnr "
            "ssim "
            "encode_time_ms\n");
    int missing_encode_time_samples = 0;
    for (const Sample& sample : samples_) {
      auto it = samples_encode_time_ms_.find(sample.input_time_ms);
      int encode_time_ms;
      if (it != samples_encode_time_ms_.end()) {
        encode_time_ms = it->second;
      } else {
        ++missing_encode_time_samples;
        encode_time_ms = -1;
      }
      fprintf(out, "%d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %" PRIuS
                   " %lf %lf %d\n",
              sample.dropped, sample.input_time_ms, sample.send_time_ms,
              sample.recv_time_ms, sample.render_time_ms,
              sample.encoded_frame_size, sample.psnr, sample.ssim,
              encode_time_ms);
    }
    if (missing_encode_time_samples) {
      fprintf(stderr,
              "Warning: Missing encode_time_ms samples for %d frame(s).\n",
              missing_encode_time_samples);
    }
  }

  const std::string test_label_;
  FILE* const graph_data_output_file_;
  const std::string graph_title_;
  const uint32_t ssrc_to_analyze_;
  std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
  std::map<int64_t, int> samples_encode_time_ms_ GUARDED_BY(comparison_lock_);
  test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
  test::Statistics receiver_time_ GUARDED_BY(comparison_lock_);
  test::Statistics psnr_ GUARDED_BY(comparison_lock_);
  test::Statistics ssim_ GUARDED_BY(comparison_lock_);
  test::Statistics end_to_end_ GUARDED_BY(comparison_lock_);
  test::Statistics rendered_delta_ GUARDED_BY(comparison_lock_);
  test::Statistics encoded_frame_size_ GUARDED_BY(comparison_lock_);
  test::Statistics encode_frame_rate_ GUARDED_BY(comparison_lock_);
  test::Statistics encode_time_ms GUARDED_BY(comparison_lock_);
  test::Statistics encode_usage_percent GUARDED_BY(comparison_lock_);
  test::Statistics media_bitrate_bps GUARDED_BY(comparison_lock_);

  const int frames_to_process_;
  int frames_recorded_;
  int frames_processed_;
  int dropped_frames_;
  int64_t last_render_time_;
  uint32_t rtp_timestamp_delta_;

  rtc::CriticalSection crit_;
  std::deque<VideoFrame> frames_ GUARDED_BY(crit_);
  VideoFrame last_rendered_frame_ GUARDED_BY(crit_);
  std::map<uint32_t, int64_t> send_times_ GUARDED_BY(crit_);
  std::map<uint32_t, int64_t> recv_times_ GUARDED_BY(crit_);
  std::map<uint32_t, size_t> encoded_frame_sizes_ GUARDED_BY(crit_);
  VideoFrame first_send_frame_ GUARDED_BY(crit_);
  const double avg_psnr_threshold_;
  const double avg_ssim_threshold_;

  rtc::CriticalSection comparison_lock_;
  std::vector<rtc::PlatformThread*> comparison_thread_pool_;
  rtc::PlatformThread stats_polling_thread_;
  rtc::Event comparison_available_event_;
  std::deque<FrameComparison> comparisons_ GUARDED_BY(comparison_lock_);
  rtc::Event done_;
};

VideoQualityTest::VideoQualityTest() : clock_(Clock::GetRealTimeClock()) {}

void VideoQualityTest::TestBody() {}

std::string VideoQualityTest::GenerateGraphTitle() const {
  std::stringstream ss;
  ss << params_.common.codec;
  ss << " (" << params_.common.target_bitrate_bps / 1000 << "kbps";
  ss << ", " << params_.common.fps << " FPS";
  if (params_.screenshare.scroll_duration)
    ss << ", " << params_.screenshare.scroll_duration << "s scroll";
  if (params_.ss.streams.size() > 1)
    ss << ", Stream #" << params_.ss.selected_stream;
  if (params_.ss.num_spatial_layers > 1)
    ss << ", Layer #" << params_.ss.selected_sl;
  ss << ")";
  return ss.str();
}

void VideoQualityTest::CheckParams() {
  // Add a default stream if none is specified.
  if (params_.ss.streams.empty())
    params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_));
  if (params_.ss.num_spatial_layers == 0)
    params_.ss.num_spatial_layers = 1;

  if (params_.pipe.loss_percent != 0 ||
      params_.pipe.queue_length_packets != 0) {
    // Since LayerFilteringTransport changes the sequence numbers, we can't
    // use that feature with packet loss, since the NACK request would end up
    // retransmitting the wrong packets.
    RTC_CHECK(params_.ss.selected_sl == -1 ||
              params_.ss.selected_sl == params_.ss.num_spatial_layers - 1);
    RTC_CHECK(params_.common.selected_tl == -1 ||
              params_.common.selected_tl ==
                  params_.common.num_temporal_layers - 1);
  }

  // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it
  // does in some parts of the code?
  RTC_CHECK_GE(params_.common.max_bitrate_bps,
               params_.common.target_bitrate_bps);
  RTC_CHECK_GE(params_.common.target_bitrate_bps,
               params_.common.min_bitrate_bps);
  RTC_CHECK_LT(params_.common.selected_tl, params_.common.num_temporal_layers);
  RTC_CHECK_LT(params_.ss.selected_stream, params_.ss.streams.size());
  for (const VideoStream& stream : params_.ss.streams) {
    RTC_CHECK_GE(stream.min_bitrate_bps, 0);
    RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
    RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
    RTC_CHECK_EQ(static_cast<int>(stream.temporal_layer_thresholds_bps.size()),
                 params_.common.num_temporal_layers - 1);
  }
  // TODO(ivica): Should we check if the sum of all streams/layers is equal to
  // the total bitrate? We anyway have to update them in the case bitrate
  // estimator changes the total bitrates.
  RTC_CHECK_GE(params_.ss.num_spatial_layers, 1);
  RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers);
  RTC_CHECK(params_.ss.spatial_layers.empty() ||
            params_.ss.spatial_layers.size() ==
                static_cast<size_t>(params_.ss.num_spatial_layers));
  if (params_.common.codec == "VP8") {
    RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1);
  } else if (params_.common.codec == "VP9") {
    RTC_CHECK_EQ(params_.ss.streams.size(), 1u);
  }
}

// Static.
std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) {
  // Parse comma separated nonnegative integers, where some elements may be
  // empty. The empty values are replaced with -1.
678  // E.g. "10,-20,,30,40" --> {10, 20, -1, 30,40}
  // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1}
  std::vector<int> result;
  if (str.empty())
    return result;

  const char* p = str.c_str();
  int value = -1;
  int pos;
  while (*p) {
    if (*p == ',') {
      result.push_back(value);
      value = -1;
      ++p;
      continue;
    }
    RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1)
        << "Unexpected non-number value.";
    p += pos;
  }
  result.push_back(value);
  return result;
}

// Static.
VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) {
  VideoStream stream;
  stream.width = params.common.width;
  stream.height = params.common.height;
  stream.max_framerate = params.common.fps;
  stream.min_bitrate_bps = params.common.min_bitrate_bps;
  stream.target_bitrate_bps = params.common.target_bitrate_bps;
  stream.max_bitrate_bps = params.common.max_bitrate_bps;
  stream.max_qp = 52;
  if (params.common.num_temporal_layers == 2)
    stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps);
  return stream;
}

// Static.
void VideoQualityTest::FillScalabilitySettings(
    Params* params,
    const std::vector<std::string>& stream_descriptors,
    size_t selected_stream,
    int num_spatial_layers,
    int selected_sl,
    const std::vector<std::string>& sl_descriptors) {
  // Read VideoStream and SpatialLayer elements from a list of comma separated
  // lists. To use a default value for an element, use -1 or leave empty.
  // Validity checks performed in CheckParams.

  RTC_CHECK(params->ss.streams.empty());
  for (auto descriptor : stream_descriptors) {
    if (descriptor.empty())
      continue;
    VideoStream stream = VideoQualityTest::DefaultVideoStream(*params);
    std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
    if (v[0] != -1)
      stream.width = static_cast<size_t>(v[0]);
    if (v[1] != -1)
      stream.height = static_cast<size_t>(v[1]);
    if (v[2] != -1)
      stream.max_framerate = v[2];
    if (v[3] != -1)
      stream.min_bitrate_bps = v[3];
    if (v[4] != -1)
      stream.target_bitrate_bps = v[4];
    if (v[5] != -1)
      stream.max_bitrate_bps = v[5];
    if (v.size() > 6 && v[6] != -1)
      stream.max_qp = v[6];
    if (v.size() > 7) {
      stream.temporal_layer_thresholds_bps.clear();
      stream.temporal_layer_thresholds_bps.insert(
          stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end());
    } else {
      // Automatic TL thresholds for more than two layers not supported.
      RTC_CHECK_LE(params->common.num_temporal_layers, 2);
    }
    params->ss.streams.push_back(stream);
  }
  params->ss.selected_stream = selected_stream;

  params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1;
  params->ss.selected_sl = selected_sl;
  RTC_CHECK(params->ss.spatial_layers.empty());
  for (auto descriptor : sl_descriptors) {
    if (descriptor.empty())
      continue;
    std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
    RTC_CHECK_GT(v[2], 0);

    SpatialLayer layer;
    layer.scaling_factor_num = v[0] == -1 ? 1 : v[0];
    layer.scaling_factor_den = v[1] == -1 ? 1 : v[1];
    layer.target_bitrate_bps = v[2];
    params->ss.spatial_layers.push_back(layer);
  }
}

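// Builds the send and receive configurations shared by both run modes: the
// encoder (VP8 or VP9), RTX and NACK settings, the bandwidth-estimation header
// extension, and the matching receive streams.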
void VideoQualityTest::SetupCommon(Transport* send_transport,
                                   Transport* recv_transport) {
  if (params_.logs)
    trace_to_stderr_.reset(new test::TraceToStderr);

  size_t num_streams = params_.ss.streams.size();
  CreateSendConfig(num_streams, 0, send_transport);

  int payload_type;
  if (params_.common.codec == "VP8") {
    encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp8));
    payload_type = kPayloadTypeVP8;
  } else if (params_.common.codec == "VP9") {
    encoder_.reset(VideoEncoder::Create(VideoEncoder::kVp9));
    payload_type = kPayloadTypeVP9;
  } else {
    RTC_NOTREACHED() << "Codec not supported!";
    return;
  }
  video_send_config_.encoder_settings.encoder = encoder_.get();
  video_send_config_.encoder_settings.payload_name = params_.common.codec;
  video_send_config_.encoder_settings.payload_type = payload_type;
  video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
  video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
  for (size_t i = 0; i < num_streams; ++i)
    video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);

  video_send_config_.rtp.extensions.clear();
  if (params_.common.send_side_bwe) {
    video_send_config_.rtp.extensions.push_back(
        RtpExtension(RtpExtension::kTransportSequenceNumber,
                     test::kTransportSequenceNumberExtensionId));
  } else {
    video_send_config_.rtp.extensions.push_back(RtpExtension(
        RtpExtension::kAbsSendTime, test::kAbsSendTimeExtensionId));
  }

  video_encoder_config_.min_transmit_bitrate_bps =
      params_.common.min_transmit_bps;
  video_encoder_config_.streams = params_.ss.streams;
  video_encoder_config_.spatial_layers = params_.ss.spatial_layers;

  CreateMatchingReceiveConfigs(recv_transport);

  for (size_t i = 0; i < num_streams; ++i) {
    video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
    video_receive_configs_[i].rtp.rtx[kSendRtxPayloadType].ssrc =
        kSendRtxSsrcs[i];
    video_receive_configs_[i].rtp.rtx[kSendRtxPayloadType].payload_type =
        kSendRtxPayloadType;
    video_receive_configs_[i].rtp.transport_cc = params_.common.send_side_bwe;
  }
}

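// Configures codec-specific settings and the slide/scrolling frame generator
// used for screenshare tests.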
void VideoQualityTest::SetupScreenshare() {
  RTC_CHECK(params_.screenshare.enabled);

  // Fill out codec settings.
  video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
  if (params_.common.codec == "VP8") {
    codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings();
    codec_settings_.VP8.denoisingOn = false;
    codec_settings_.VP8.frameDroppingOn = false;
    codec_settings_.VP8.numberOfTemporalLayers =
        static_cast<unsigned char>(params_.common.num_temporal_layers);
    video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
  } else if (params_.common.codec == "VP9") {
    codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings();
    codec_settings_.VP9.denoisingOn = false;
    codec_settings_.VP9.frameDroppingOn = false;
    codec_settings_.VP9.numberOfTemporalLayers =
        static_cast<unsigned char>(params_.common.num_temporal_layers);
    video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
    codec_settings_.VP9.numberOfSpatialLayers =
        static_cast<unsigned char>(params_.ss.num_spatial_layers);
  }

  // Setup frame generator.
  const size_t kWidth = 1850;
  const size_t kHeight = 1110;
  std::vector<std::string> slides;
  slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
  slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
  slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
  slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));

  if (params_.screenshare.scroll_duration == 0) {
    // Cycle image every slide_change_interval seconds.
    frame_generator_.reset(test::FrameGenerator::CreateFromYuvFile(
        slides, kWidth, kHeight,
        params_.screenshare.slide_change_interval * params_.common.fps));
  } else {
    RTC_CHECK_LE(params_.common.width, kWidth);
    RTC_CHECK_LE(params_.common.height, kHeight);
    RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0);
    const int kPauseDurationMs = (params_.screenshare.slide_change_interval -
                                  params_.screenshare.scroll_duration) *
                                 1000;
    RTC_CHECK_LE(params_.screenshare.scroll_duration,
                 params_.screenshare.slide_change_interval);

    frame_generator_.reset(
        test::FrameGenerator::CreateScrollingInputFromYuvFiles(
            clock_, slides, kWidth, kHeight, params_.common.width,
            params_.common.height, params_.screenshare.scroll_duration * 1000,
            kPauseDurationMs));
  }
}

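// Creates the frame source feeding |input|: the screenshare frame generator, a
// YUV clip file, or the default test capturer, depending on the parameters.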
void VideoQualityTest::CreateCapturer(VideoCaptureInput* input) {
  if (params_.screenshare.enabled) {
    test::FrameGeneratorCapturer* frame_generator_capturer =
        new test::FrameGeneratorCapturer(
            clock_, input, frame_generator_.release(), params_.common.fps);
    EXPECT_TRUE(frame_generator_capturer->Init());
    capturer_.reset(frame_generator_capturer);
  } else {
    if (params_.video.clip_name.empty()) {
      capturer_.reset(test::VideoCapturer::Create(input, params_.common.width,
                                                  params_.common.height,
                                                  params_.common.fps, clock_));
    } else {
      capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile(
          input, test::ResourcePath(params_.video.clip_name, "yuv"),
          params_.common.width, params_.common.height, params_.common.fps,
          clock_));
      ASSERT_TRUE(capturer_.get() != nullptr)
          << "Could not create capturer for " << params_.video.clip_name
          << ".yuv. Is this resource file present?";
    }
  }
}

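// Runs the full send/receive loopback with a VideoAnalyzer spliced into the
// capture, transport and render paths, and blocks until the configured number
// of frames has been analyzed.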
void VideoQualityTest::RunWithAnalyzer(const Params& params) {
  params_ = params;

  // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
  // differentiate between the analyzer and the renderer case.
  CheckParams();

  FILE* graph_data_output_file = nullptr;
  if (!params_.analyzer.graph_data_output_filename.empty()) {
    graph_data_output_file =
        fopen(params_.analyzer.graph_data_output_filename.c_str(), "w");
    RTC_CHECK(graph_data_output_file != nullptr)
        << "Can't open the file " << params_.analyzer.graph_data_output_filename
        << "!";
  }

  Call::Config call_config;
  call_config.bitrate_config = params.common.call_bitrate_config;
  CreateCalls(call_config, call_config);

  test::LayerFilteringTransport send_transport(
      params.pipe, sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9,
      params.common.selected_tl, params_.ss.selected_sl);
  test::DirectTransport recv_transport(params.pipe, receiver_call_.get());

  std::string graph_title = params_.analyzer.graph_title;
  if (graph_title.empty())
    graph_title = VideoQualityTest::GenerateGraphTitle();

  // In the case of different resolutions, the functions calculating PSNR and
  // SSIM return -1.0, instead of a positive value as usual. VideoAnalyzer
  // aborts if the average psnr/ssim are below the given threshold, which is
  // 0.0 by default. Setting the thresholds to -1.1 prevents the unnecessary
  // abort.
  VideoStream& selected_stream = params_.ss.streams[params_.ss.selected_stream];
  int selected_sl = params_.ss.selected_sl != -1
                        ? params_.ss.selected_sl
                        : params_.ss.num_spatial_layers - 1;
  bool disable_quality_check =
      selected_stream.width != params_.common.width ||
      selected_stream.height != params_.common.height ||
      (!params_.ss.spatial_layers.empty() &&
       params_.ss.spatial_layers[selected_sl].scaling_factor_num !=
           params_.ss.spatial_layers[selected_sl].scaling_factor_den);
  if (disable_quality_check) {
    fprintf(stderr,
            "Warning: Calculating PSNR and SSIM for downsized resolution "
            "not implemented yet! Skipping PSNR and SSIM calculations!");
  }

  VideoAnalyzer analyzer(
      &send_transport, params_.analyzer.test_label,
      disable_quality_check ? -1.1 : params_.analyzer.avg_psnr_threshold,
      disable_quality_check ? -1.1 : params_.analyzer.avg_ssim_threshold,
      params_.analyzer.test_durations_secs * params_.common.fps,
      graph_data_output_file, graph_title,
      kVideoSendSsrcs[params_.ss.selected_stream]);

  analyzer.SetReceiver(receiver_call_->Receiver());
  send_transport.SetReceiver(&analyzer);
  recv_transport.SetReceiver(sender_call_->Receiver());

  SetupCommon(&analyzer, &recv_transport);
  video_send_config_.encoding_time_observer = &analyzer;
  video_receive_configs_[params_.ss.selected_stream].renderer = &analyzer;
  for (auto& config : video_receive_configs_)
    config.pre_decode_callback = &analyzer;

  if (params_.screenshare.enabled)
    SetupScreenshare();

  CreateVideoStreams();
  analyzer.input_ = video_send_stream_->Input();
  analyzer.send_stream_ = video_send_stream_;

  CreateCapturer(&analyzer);

  video_send_stream_->Start();
  for (VideoReceiveStream* receive_stream : video_receive_streams_)
    receive_stream->Start();
  capturer_->Start();

  analyzer.Wait();

  send_transport.StopSending();
  recv_transport.StopSending();

  capturer_->Stop();
  for (VideoReceiveStream* receive_stream : video_receive_streams_)
    receive_stream->Stop();
  video_send_stream_->Stop();

  DestroyStreams();

  if (graph_data_output_file)
    fclose(graph_data_output_file);
}

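// Interactive variant: runs the same loopback on a single Call and renders the
// local preview and loopback video on screen until Enter is pressed.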
void VideoQualityTest::RunWithVideoRenderer(const Params& params) {
  params_ = params;
  CheckParams();

  rtc::scoped_ptr<test::VideoRenderer> local_preview(
      test::VideoRenderer::Create("Local Preview", params_.common.width,
                                  params_.common.height));
  size_t stream_id = params_.ss.selected_stream;
  std::string title = "Loopback Video";
  if (params_.ss.streams.size() > 1) {
    std::ostringstream s;
    s << stream_id;
    title += " - Stream #" + s.str();
  }

  rtc::scoped_ptr<test::VideoRenderer> loopback_video(
      test::VideoRenderer::Create(title.c_str(),
                                  params_.ss.streams[stream_id].width,
                                  params_.ss.streams[stream_id].height));

  // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to
  // match the full stack tests.
  Call::Config call_config;
  call_config.bitrate_config = params_.common.call_bitrate_config;
  rtc::scoped_ptr<Call> call(Call::Create(call_config));

  test::LayerFilteringTransport transport(
      params.pipe, call.get(), kPayloadTypeVP8, kPayloadTypeVP9,
      params.common.selected_tl, params_.ss.selected_sl);
  // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
  // least share as much code as possible. That way this test would also match
  // the full stack tests better.
  transport.SetReceiver(call->Receiver());

  SetupCommon(&transport, &transport);

  video_send_config_.local_renderer = local_preview.get();
  video_receive_configs_[stream_id].renderer = loopback_video.get();

  if (params_.screenshare.enabled)
    SetupScreenshare();

  video_send_stream_ =
      call->CreateVideoSendStream(video_send_config_, video_encoder_config_);
  VideoReceiveStream* receive_stream =
      call->CreateVideoReceiveStream(video_receive_configs_[stream_id]);
  CreateCapturer(video_send_stream_->Input());

  receive_stream->Start();
  video_send_stream_->Start();
  capturer_->Start();

  test::PressEnterToContinue();

  capturer_->Stop();
  video_send_stream_->Stop();
  receive_stream->Stop();

  call->DestroyVideoReceiveStream(receive_stream);
  call->DestroyVideoSendStream(video_send_stream_);

  transport.StopSending();
}

}  // namespace webrtc