videocapturer.cc revision dd4a8da68ada4c91653271462b21a23b0319ef66
/*
 * libjingle
 * Copyright 2010 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// Implementation file of class VideoCapturer.

#include "talk/media/base/videocapturer.h"

#include <algorithm>
#include <limits>

#include "libyuv/scale_argb.h"
#include "talk/media/base/videoframefactory.h"
#include "talk/media/base/videoprocessor.h"
#include "webrtc/base/common.h"
#include "webrtc/base/logging.h"
#include "webrtc/base/systeminfo.h"

#if defined(HAVE_WEBRTC_VIDEO)
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "talk/media/webrtc/webrtcvideoframefactory.h"
#endif  // HAVE_WEBRTC_VIDEO
45
46namespace cricket {
47
48namespace {
49
50// TODO(thorcarpenter): This is a BIG hack to flush the system with black
51// frames. Frontends should coordinate to update the video state of a muted
52// user. When all frontends to this consider removing the black frame business.
53const int kNumBlackFramesOnMute = 30;
54
55// MessageHandler constants.
56enum {
57  MSG_DO_PAUSE = 0,
58  MSG_DO_UNPAUSE,
59  MSG_STATE_CHANGE
60};
61
62static const int64 kMaxDistance = ~(static_cast<int64>(1) << 63);
63#ifdef LINUX
64static const int kYU12Penalty = 16;  // Needs to be higher than MJPG index.
65#endif
66static const int kDefaultScreencastFps = 5;
67typedef rtc::TypedMessageData<CaptureState> StateChangeParams;
68
69// Limit stats data collections to ~20 seconds of 30fps data before dropping
70// old data in case stats aren't reset for long periods of time.
71static const size_t kMaxAccumulatorSize = 600;
72
73}  // namespace
74
/////////////////////////////////////////////////////////////////////
// Implementation of struct CapturedFrame
/////////////////////////////////////////////////////////////////////
// Zero-initializes every field. |data| starts out NULL; the capturer that
// produces the frame is responsible for pointing it at pixel storage.
// NOTE: the initializer order must mirror the member declaration order in
// the header (not visible here), so it is left untouched.
CapturedFrame::CapturedFrame()
    : width(0),
      height(0),
      fourcc(0),
      pixel_width(0),
      pixel_height(0),
      elapsed_time(0),
      time_stamp(0),
      data_size(0),
      rotation(0),
      data(NULL) {}
89
90// TODO(fbarchard): Remove this function once lmimediaengine stops using it.
91bool CapturedFrame::GetDataSize(uint32* size) const {
92  if (!size || data_size == CapturedFrame::kUnknownDataSize) {
93    return false;
94  }
95  *size = data_size;
96  return true;
97}
98
99webrtc::VideoRotation CapturedFrame::GetRotation() const {
100  ASSERT(rotation == 0 || rotation == 90 || rotation == 180 || rotation == 270);
101  return static_cast<webrtc::VideoRotation>(rotation);
102}
103
/////////////////////////////////////////////////////////////////////
// Implementation of class VideoCapturer
/////////////////////////////////////////////////////////////////////
// Default constructor: binds the capturer to the thread that constructs it.
// The stats accumulators are bounded to kMaxAccumulatorSize samples.
VideoCapturer::VideoCapturer()
    : thread_(rtc::Thread::Current()),
      adapt_frame_drops_data_(kMaxAccumulatorSize),
      effect_frame_drops_data_(kMaxAccumulatorSize),
      frame_time_data_(kMaxAccumulatorSize),
      apply_rotation_(true) {
  Construct();
}
115
// Constructor binding the capturer to an explicitly supplied |thread| used
// for posted messages (pause requests, state-change notifications).
VideoCapturer::VideoCapturer(rtc::Thread* thread)
    : thread_(thread),
      adapt_frame_drops_data_(kMaxAccumulatorSize),
      effect_frame_drops_data_(kMaxAccumulatorSize),
      frame_time_data_(kMaxAccumulatorSize),
      apply_rotation_(true) {
  Construct();
}
124
// Shared initialization for both constructors: resets every mutable member
// to its resting state and wires the internal frame-captured handler.
void VideoCapturer::Construct() {
  ClearAspectRatio();
  enable_camera_list_ = false;
  square_pixel_aspect_ratio_ = false;
  capture_state_ = CS_STOPPED;
  // Frames delivered by subclasses on SignalFrameCaptured are post-processed
  // in OnFrameCaptured before being re-emitted on SignalVideoFrame.
  SignalFrameCaptured.connect(this, &VideoCapturer::OnFrameCaptured);
  scaled_width_ = 0;
  scaled_height_ = 0;
  screencast_max_pixels_ = 0;
  muted_ = false;
  black_frame_count_down_ = kNumBlackFramesOnMute;
  enable_video_adapter_ = true;
  adapt_frame_drops_ = 0;
  effect_frame_drops_ = 0;
  previous_frame_time_ = 0.0;
#ifdef HAVE_WEBRTC_VIDEO
  // There are lots of video capturers out there that don't call
  // set_frame_factory.  We can either go change all of them, or we
  // can set this default.
  // TODO(pthatcher): Remove this hack and require the frame factory
  // to be passed in the constructor.
  set_frame_factory(new WebRtcVideoFrameFactory());
#endif
}
149
// Returns the supported formats, post-filtered against |max_format_| (see
// UpdateFilteredSupportedFormats()). Never NULL; the list may be empty.
const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
  return &filtered_supported_formats_;
}
153
154bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
155  previous_frame_time_ = frame_length_time_reporter_.TimerNow();
156  CaptureState result = Start(capture_format);
157  const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
158  if (!success) {
159    return false;
160  }
161  if (result == CS_RUNNING) {
162    SetCaptureState(result);
163  }
164  return true;
165}
166
167void VideoCapturer::UpdateAspectRatio(int ratio_w, int ratio_h) {
168  if (ratio_w == 0 || ratio_h == 0) {
169    LOG(LS_WARNING) << "UpdateAspectRatio ignored invalid ratio: "
170                    << ratio_w << "x" << ratio_h;
171    return;
172  }
173  ratio_w_ = ratio_w;
174  ratio_h_ = ratio_h;
175}
176
177void VideoCapturer::ClearAspectRatio() {
178  ratio_w_ = 0;
179  ratio_h_ = 0;
180}
181
// Override this to have more control of how your device is started/stopped.
// Pauses (pause == true) by stopping the device while remembering the active
// capture format, or unpauses (pause == false) by restarting with that
// remembered format. Returns false when the transition is not legal from the
// current state (pausing a stopped camera, unpausing while muted or without
// a saved format).
bool VideoCapturer::Pause(bool pause) {
  if (pause) {
    if (capture_state() == CS_PAUSED) {
      // Already paused; nothing to do.
      return true;
    }
    bool is_running = capture_state() == CS_STARTING ||
        capture_state() == CS_RUNNING;
    if (!is_running) {
      LOG(LS_ERROR) << "Cannot pause a stopped camera.";
      return false;
    }
    LOG(LS_INFO) << "Pausing a camera.";
    // Copy the current format before Stop(), which may clear it; it is
    // needed again when unpausing.
    rtc::scoped_ptr<VideoFormat> capture_format_when_paused(
        capture_format_ ? new VideoFormat(*capture_format_) : NULL);
    Stop();
    SetCaptureState(CS_PAUSED);
    // If you override this function be sure to restore the capture format
    // after calling Stop().
    SetCaptureFormat(capture_format_when_paused.get());
  } else {  // Unpause.
    if (capture_state() != CS_PAUSED) {
      LOG(LS_WARNING) << "Cannot unpause a camera that hasn't been paused.";
      return false;
    }
    if (!capture_format_) {
      LOG(LS_ERROR) << "Missing capture_format_, cannot unpause a camera.";
      return false;
    }
    if (muted_) {
      LOG(LS_WARNING) << "Camera cannot be unpaused while muted.";
      return false;
    }
    LOG(LS_INFO) << "Unpausing a camera.";
    if (!Start(*capture_format_)) {
      LOG(LS_ERROR) << "Camera failed to start when unpausing.";
      return false;
    }
  }
  return true;
}
223
224bool VideoCapturer::Restart(const VideoFormat& capture_format) {
225  if (!IsRunning()) {
226    return StartCapturing(capture_format);
227  }
228
229  if (GetCaptureFormat() != NULL && *GetCaptureFormat() == capture_format) {
230    // The reqested format is the same; nothing to do.
231    return true;
232  }
233
234  Stop();
235  return StartCapturing(capture_format);
236}
237
// Mutes by letting the next kNumBlackFramesOnMute frames be blacked out in
// OnFrameCaptured, after which a deferred pause is posted; unmuting cancels
// any pending deferred pause and restarts the camera.
bool VideoCapturer::MuteToBlackThenPause(bool muted) {
  if (muted == IsMuted()) {
    // Already in the requested state.
    return true;
  }

  LOG(LS_INFO) << (muted ? "Muting" : "Unmuting") << " this video capturer.";
  muted_ = muted;  // Do this before calling Pause().
  if (muted) {
    // Reset black frame count down.
    black_frame_count_down_ = kNumBlackFramesOnMute;
    // Following frames will be overwritten with black, then the camera will
    // be paused.
    return true;
  }
  // Start the camera.
  thread_->Clear(this, MSG_DO_PAUSE);
  return Pause(false);
}
256
257bool VideoCapturer::SetApplyRotation(bool enable) {
258  apply_rotation_ = enable;
259  if (frame_factory_) {
260    frame_factory_->SetApplyRotation(apply_rotation_);
261  }
262  return true;
263}
264
// Replaces the device's full format list, then immediately rebuilds the
// filtered list so it stays consistent with |max_format_|.
void VideoCapturer::SetSupportedFormats(
    const std::vector<VideoFormat>& formats) {
  supported_formats_ = formats;
  UpdateFilteredSupportedFormats();
}
270
// Finds the supported format closest to |format| (ranking defined by
// GetFormatDistance) and copies it into |best_format| when that pointer is
// non-NULL. Returns false when no supported format is an acceptable match.
bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
                                         VideoFormat* best_format) {
  // TODO(fbarchard): Directly support max_format.
  UpdateFilteredSupportedFormats();
  const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();

  if (supported_formats->empty()) {
    return false;
  }
  LOG(LS_INFO) << " Capture Requested " << format.ToString();
  int64 best_distance = kMaxDistance;
  std::vector<VideoFormat>::const_iterator best = supported_formats->end();
  std::vector<VideoFormat>::const_iterator i;
  for (i = supported_formats->begin(); i != supported_formats->end(); ++i) {
    int64 distance = GetFormatDistance(format, *i);
    // TODO(fbarchard): Reduce to LS_VERBOSE if/when camera capture is
    // relatively bug free.
    LOG(LS_INFO) << " Supported " << i->ToString() << " distance " << distance;
    // Strict '<' keeps the earliest format in the list on ties.
    if (distance < best_distance) {
      best_distance = distance;
      best = i;
    }
  }
  if (supported_formats->end() == best) {
    // Every candidate scored kMaxDistance (e.g. no fourcc match at all).
    LOG(LS_ERROR) << " No acceptable camera format found";
    return false;
  }

  if (best_format) {
    best_format->width = best->width;
    best_format->height = best->height;
    best_format->fourcc = best->fourcc;
    best_format->interval = best->interval;
    LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval "
                 << best_format->interval << " distance " << best_distance;
  }
  return true;
}
309
310void VideoCapturer::AddVideoProcessor(VideoProcessor* video_processor) {
311  rtc::CritScope cs(&crit_);
312  ASSERT(std::find(video_processors_.begin(), video_processors_.end(),
313                   video_processor) == video_processors_.end());
314  video_processors_.push_back(video_processor);
315}
316
317bool VideoCapturer::RemoveVideoProcessor(VideoProcessor* video_processor) {
318  rtc::CritScope cs(&crit_);
319  VideoProcessors::iterator found = std::find(
320      video_processors_.begin(), video_processors_.end(), video_processor);
321  if (found == video_processors_.end()) {
322    return false;
323  }
324  video_processors_.erase(found);
325  return true;
326}
327
328void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
329  max_format_.reset(new VideoFormat(max_format));
330  LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
331  UpdateFilteredSupportedFormats();
332}
333
334std::string VideoCapturer::ToString(const CapturedFrame* captured_frame) const {
335  std::string fourcc_name = GetFourccName(captured_frame->fourcc) + " ";
336  for (std::string::const_iterator i = fourcc_name.begin();
337       i < fourcc_name.end(); ++i) {
338    // Test character is printable; Avoid isprint() which asserts on negatives.
339    if (*i < 32 || *i >= 127) {
340      fourcc_name = "";
341      break;
342    }
343  }
344
345  std::ostringstream ss;
346  ss << fourcc_name << captured_frame->width << "x" << captured_frame->height
347     << "x" << VideoFormat::IntervalToFpsFloat(captured_frame->elapsed_time);
348  return ss.str();
349}
350
351void VideoCapturer::set_frame_factory(VideoFrameFactory* frame_factory) {
352  frame_factory_.reset(frame_factory);
353  if (frame_factory) {
354    frame_factory->SetApplyRotation(apply_rotation_);
355  }
356}
357
// Snapshots the accumulated frame statistics into the four out-params and
// then resets the accumulators, so each call reports the period since the
// previous call. Guarded by |frame_stats_crit_| against UpdateStats().
void VideoCapturer::GetStats(VariableInfo<int>* adapt_drops_stats,
                             VariableInfo<int>* effect_drops_stats,
                             VariableInfo<double>* frame_time_stats,
                             VideoFormat* last_captured_frame_format) {
  rtc::CritScope cs(&frame_stats_crit_);
  GetVariableSnapshot(adapt_frame_drops_data_, adapt_drops_stats);
  GetVariableSnapshot(effect_frame_drops_data_, effect_drops_stats);
  GetVariableSnapshot(frame_time_data_, frame_time_stats);
  *last_captured_frame_format = last_captured_frame_format_;

  // Snapshot first, then reset: subsequent frames start a new window.
  adapt_frame_drops_data_.Reset();
  effect_frame_drops_data_.Reset();
  frame_time_data_.Reset();
}
372
// Post-processing pipeline for every frame a capturer subclass delivers via
// SignalFrameCaptured: mute/black-frame handling, screencast downscaling,
// square-pixel correction, cropping, resolution adaptation, processor
// effects, then emission on SignalVideoFrame.
void VideoCapturer::OnFrameCaptured(VideoCapturer*,
                                    const CapturedFrame* captured_frame) {
  if (muted_) {
    // While muted, keep delivering frames (blacked out below) for a while,
    // then post a deferred pause to |thread_|.
    if (black_frame_count_down_ == 0) {
      thread_->Post(this, MSG_DO_PAUSE, NULL);
    } else {
      --black_frame_count_down_;
    }
  }

  if (SignalVideoFrame.is_empty()) {
    // Nobody is listening; skip all conversion work.
    return;
  }

  // Use a temporary buffer to scale
  rtc::scoped_ptr<uint8[]> scale_buffer;

  if (IsScreencast()) {
    int scaled_width, scaled_height;
    if (screencast_max_pixels_ > 0) {
      ComputeScaleMaxPixels(captured_frame->width, captured_frame->height,
          screencast_max_pixels_, &scaled_width, &scaled_height);
    } else {
      int desired_screencast_fps = capture_format_.get() ?
          VideoFormat::IntervalToFps(capture_format_->interval) :
          kDefaultScreencastFps;
      ComputeScale(captured_frame->width, captured_frame->height,
                   desired_screencast_fps, &scaled_width, &scaled_height);
    }

    // Only ARGB screencast frames are scaled here; other fourccs pass
    // through unscaled.
    if (FOURCC_ARGB == captured_frame->fourcc &&
        (scaled_width != captured_frame->width ||
        scaled_height != captured_frame->height)) {
      if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
        LOG(LS_INFO) << "Scaling Screencast from "
                     << captured_frame->width << "x"
                     << captured_frame->height << " to "
                     << scaled_width << "x" << scaled_height;
        scaled_width_ = scaled_width;
        scaled_height_ = scaled_height;
      }
      // The captured frame is modified in place to point at |scale_buffer|,
      // which stays alive until the frame factory copies the data below.
      CapturedFrame* modified_frame =
          const_cast<CapturedFrame*>(captured_frame);
      const int modified_frame_size = scaled_width * scaled_height * 4;
      scale_buffer.reset(new uint8[modified_frame_size]);
      // Compute new width such that width * height is less than maximum but
      // maintains original captured frame aspect ratio.
      // Round down width to multiple of 4 so odd width won't round up beyond
      // maximum, and so chroma channel is even width to simplify spatial
      // resampling.
      libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data),
                        captured_frame->width * 4, captured_frame->width,
                        captured_frame->height,
                        scale_buffer.get(),
                        scaled_width * 4, scaled_width, scaled_height,
                        libyuv::kFilterBilinear);
      modified_frame->width = scaled_width;
      modified_frame->height = scaled_height;
      modified_frame->data_size = scaled_width * 4 * scaled_height;
      modified_frame->data = scale_buffer.get();
    }
  }

  const int kYuy2Bpp = 2;
  const int kArgbBpp = 4;
  // TODO(fbarchard): Make a helper function to adjust pixels to square.
  // TODO(fbarchard): Hook up experiment to scaling.
  // TODO(fbarchard): Avoid scale and convert if muted.
  // Temporary buffer is scoped here so it will persist until i420_frame.Init()
  // makes a copy of the frame, converting to I420.
  rtc::scoped_ptr<uint8[]> temp_buffer;
  // YUY2 can be scaled vertically using an ARGB scaler.  Aspect ratio is only
  // a problem on OSX.  OSX always converts webcams to YUY2 or UYVY.
  bool can_scale =
      FOURCC_YUY2 == CanonicalFourCC(captured_frame->fourcc) ||
      FOURCC_UYVY == CanonicalFourCC(captured_frame->fourcc);

  // If pixels are not square, optionally use vertical scaling to make them
  // square.  Square pixels simplify the rest of the pipeline, including
  // effects and rendering.
  if (can_scale && square_pixel_aspect_ratio_ &&
      captured_frame->pixel_width != captured_frame->pixel_height) {
    int scaled_width, scaled_height;
    // modified_frame points to the captured_frame but with const casted away
    // so it can be modified.
    CapturedFrame* modified_frame = const_cast<CapturedFrame*>(captured_frame);
    // Compute the frame size that makes pixels square pixel aspect ratio.
    ComputeScaleToSquarePixels(captured_frame->width, captured_frame->height,
                               captured_frame->pixel_width,
                               captured_frame->pixel_height,
                               &scaled_width, &scaled_height);

    if (scaled_width != scaled_width_ || scaled_height != scaled_height_) {
      LOG(LS_INFO) << "Scaling WebCam from "
                   << captured_frame->width << "x"
                   << captured_frame->height << " to "
                   << scaled_width << "x" << scaled_height
                   << " for PAR "
                   << captured_frame->pixel_width << "x"
                   << captured_frame->pixel_height;
      scaled_width_ = scaled_width;
      scaled_height_ = scaled_height;
    }
    const int modified_frame_size = scaled_width * scaled_height * kYuy2Bpp;
    uint8* temp_buffer_data;
    // Pixels are wide and short; Increasing height. Requires temporary buffer.
    if (scaled_height > captured_frame->height) {
      temp_buffer.reset(new uint8[modified_frame_size]);
      temp_buffer_data = temp_buffer.get();
    } else {
      // Pixels are narrow and tall; Decreasing height. Scale will be done
      // in place.
      temp_buffer_data = reinterpret_cast<uint8*>(captured_frame->data);
    }

    // Use ARGBScaler to vertically scale the YUY2 image, adjusting for 16 bpp.
    libyuv::ARGBScale(reinterpret_cast<const uint8*>(captured_frame->data),
                      captured_frame->width * kYuy2Bpp,  // Stride for YUY2.
                      captured_frame->width * kYuy2Bpp / kArgbBpp,  // Width.
                      abs(captured_frame->height),  // Height.
                      temp_buffer_data,
                      scaled_width * kYuy2Bpp,  // Stride for YUY2.
                      scaled_width * kYuy2Bpp / kArgbBpp,  // Width.
                      abs(scaled_height),  // New height.
                      libyuv::kFilterBilinear);
    modified_frame->width = scaled_width;
    modified_frame->height = scaled_height;
    modified_frame->pixel_width = 1;
    modified_frame->pixel_height = 1;
    modified_frame->data_size = modified_frame_size;
    modified_frame->data = temp_buffer_data;
  }

  // Size to crop captured frame to.  This adjusts the captured frames
  // aspect ratio to match the final view aspect ratio, considering pixel
  // aspect ratio and rotation.  The final size may be scaled down by video
  // adapter to better match ratio_w_ x ratio_h_.
  // Note that abs() of frame height is passed in, because source may be
  // inverted, but output will be positive.
  int cropped_width = captured_frame->width;
  int cropped_height = captured_frame->height;

  // TODO(fbarchard): Improve logic to pad or crop.
  // MJPG can crop vertically, but not horizontally.  This logic disables crop.
  // Alternatively we could pad the image with black, or implement a 2 step
  // crop.
  bool can_crop = true;
  if (captured_frame->fourcc == FOURCC_MJPG) {
    float cam_aspect = static_cast<float>(captured_frame->width) /
        static_cast<float>(captured_frame->height);
    float view_aspect = static_cast<float>(ratio_w_) /
        static_cast<float>(ratio_h_);
    can_crop = cam_aspect <= view_aspect;
  }
  if (can_crop && !IsScreencast()) {
    // TODO(ronghuawu): The capturer should always produce the native
    // resolution and the cropping should be done in downstream code.
    ComputeCrop(ratio_w_, ratio_h_, captured_frame->width,
                abs(captured_frame->height), captured_frame->pixel_width,
                captured_frame->pixel_height, captured_frame->rotation,
                &cropped_width, &cropped_height);
  }

  int adapted_width = cropped_width;
  int adapted_height = cropped_height;
  if (enable_video_adapter_ && !IsScreencast()) {
    const VideoFormat adapted_format =
        video_adapter_.AdaptFrameResolution(cropped_width, cropped_height);
    if (adapted_format.IsSize0x0()) {
      // VideoAdapter dropped the frame.
      ++adapt_frame_drops_;
      return;
    }
    adapted_width = adapted_format.width;
    adapted_height = adapted_format.height;
  }

  if (!frame_factory_) {
    LOG(LS_ERROR) << "No video frame factory.";
    return;
  }

  rtc::scoped_ptr<VideoFrame> adapted_frame(
      frame_factory_->CreateAliasedFrame(captured_frame,
                                         cropped_width, cropped_height,
                                         adapted_width, adapted_height));

  if (!adapted_frame) {
    // TODO(fbarchard): LOG more information about captured frame attributes.
    LOG(LS_ERROR) << "Couldn't convert to I420! "
                  << "From " << ToString(captured_frame) << " To "
                  << cropped_width << " x " << cropped_height;
    return;
  }

  if (!muted_ && !ApplyProcessors(adapted_frame.get())) {
    // Processor dropped the frame.
    ++effect_frame_drops_;
    return;
  }
  if (muted_ || (enable_video_adapter_ && video_adapter_.IsBlackOutput())) {
    // TODO(pthatcher): Use frame_factory_->CreateBlackFrame() instead.
    adapted_frame->SetToBlack();
  }
  SignalVideoFrame(this, adapted_frame.get());

  UpdateStats(captured_frame);
}
581
582void VideoCapturer::SetCaptureState(CaptureState state) {
583  if (state == capture_state_) {
584    // Don't trigger a state changed callback if the state hasn't changed.
585    return;
586  }
587  StateChangeParams* state_params = new StateChangeParams(state);
588  capture_state_ = state;
589  thread_->Post(this, MSG_STATE_CHANGE, state_params);
590}
591
592void VideoCapturer::OnMessage(rtc::Message* message) {
593  switch (message->message_id) {
594    case MSG_STATE_CHANGE: {
595      rtc::scoped_ptr<StateChangeParams> p(
596          static_cast<StateChangeParams*>(message->pdata));
597      SignalStateChange(this, p->data());
598      break;
599    }
600    case MSG_DO_PAUSE: {
601      Pause(true);
602      break;
603    }
604    case MSG_DO_UNPAUSE: {
605      Pause(false);
606      break;
607    }
608    default: {
609      ASSERT(false);
610    }
611  }
612}
613
// Get the distance between the supported and desired formats.
// Prioritization is done according to this algorithm:
// 1) Width closeness. If not same, we prefer wider.
// 2) Height closeness. If not same, we prefer higher.
// 3) Framerate closeness. If not same, we prefer faster.
// 4) Compression. If desired format has a specific fourcc, we need exact match;
//                otherwise, we use preference.
// Returns kMaxDistance when the fourcc cannot be matched at all. Smaller
// values are better; the result packs the weighted deltas into disjoint bit
// ranges of a single int64 (see the bit-layout comment near the bottom).
int64 VideoCapturer::GetFormatDistance(const VideoFormat& desired,
                                       const VideoFormat& supported) {
  int64 distance = kMaxDistance;

  // Check fourcc.
  uint32 supported_fourcc = CanonicalFourCC(supported.fourcc);
  int64 delta_fourcc = kMaxDistance;
  if (FOURCC_ANY == desired.fourcc) {
    // Any fourcc is OK for the desired. Use preference to find best fourcc.
    std::vector<uint32> preferred_fourccs;
    if (!GetPreferredFourccs(&preferred_fourccs)) {
      return distance;
    }

    for (size_t i = 0; i < preferred_fourccs.size(); ++i) {
      if (supported_fourcc == CanonicalFourCC(preferred_fourccs[i])) {
        // Lower index in the preference list means a smaller penalty.
        delta_fourcc = i;
#ifdef LINUX
        // For HD avoid YU12 which is a software conversion and has 2 bugs
        // b/7326348 b/6960899.  Reenable when fixed.
        if (supported.height >= 720 && (supported_fourcc == FOURCC_YU12 ||
                                        supported_fourcc == FOURCC_YV12)) {
          delta_fourcc += kYU12Penalty;
        }
#endif
        break;
      }
    }
  } else if (supported_fourcc == CanonicalFourCC(desired.fourcc)) {
    delta_fourcc = 0;  // Need exact match.
  }

  if (kMaxDistance == delta_fourcc) {
    // Failed to match fourcc.
    return distance;
  }

  // Check resolution and fps.
  int desired_width = desired.width;
  int desired_height = desired.height;
  int64 delta_w = supported.width - desired_width;
  float supported_fps = VideoFormat::IntervalToFpsFloat(supported.interval);
  float delta_fps =
      supported_fps - VideoFormat::IntervalToFpsFloat(desired.interval);
  // Check height of supported height compared to height we would like it to be.
  int64 aspect_h =
      desired_width ? supported.width * desired_height / desired_width
                    : desired_height;
  int64 delta_h = supported.height - aspect_h;

  distance = 0;
  // Set high penalty if the supported format is lower than the desired format.
  // 3x means we would prefer down to down to 3/4, than up to double.
  // But we'd prefer up to double than down to 1/2.  This is conservative,
  // strongly avoiding going down in resolution, similar to
  // the old method, but not completely ruling it out in extreme situations.
  // It also ignores framerate, which is often very low at high resolutions.
  // TODO(fbarchard): Improve logic to use weighted factors.
  static const int kDownPenalty = -3;
  if (delta_w < 0) {
    delta_w = delta_w * kDownPenalty;
  }
  if (delta_h < 0) {
    delta_h = delta_h * kDownPenalty;
  }
  // Require camera fps to be at least 80% of what is requested if resolution
  // matches.
  // Require camera fps to be at least 96% of what is requested, or higher,
  // if resolution differs. 96% allows for slight variations in fps. e.g. 29.97
  if (delta_fps < 0) {
    float min_desirable_fps = delta_w ?
    VideoFormat::IntervalToFpsFloat(desired.interval) * 28.f / 30.f :
    VideoFormat::IntervalToFpsFloat(desired.interval) * 23.f / 30.f;
    delta_fps = -delta_fps;
    if (supported_fps < min_desirable_fps) {
      // Well below the acceptable rate: very high-order penalty bit.
      distance |= static_cast<int64>(1) << 62;
    } else {
      // Slightly below the requested rate: moderate penalty bit.
      distance |= static_cast<int64>(1) << 15;
    }
  }
  // NOTE(review): delta_fps is truncated through int here, so fractional fps
  // differences below 1.0 do not contribute to the packed distance.
  int64 idelta_fps = static_cast<int>(delta_fps);

  // 12 bits for width and height and 8 bits for fps and fourcc.
  distance |=
      (delta_w << 28) | (delta_h << 16) | (idelta_fps << 8) | delta_fourcc;

  return distance;
}
709
710bool VideoCapturer::ApplyProcessors(VideoFrame* video_frame) {
711  bool drop_frame = false;
712  rtc::CritScope cs(&crit_);
713  for (VideoProcessors::iterator iter = video_processors_.begin();
714       iter != video_processors_.end(); ++iter) {
715    (*iter)->OnFrame(kDummyVideoSsrc, video_frame, &drop_frame);
716    if (drop_frame) {
717      return false;
718    }
719  }
720  return true;
721}
722
723void VideoCapturer::UpdateFilteredSupportedFormats() {
724  filtered_supported_formats_.clear();
725  filtered_supported_formats_ = supported_formats_;
726  if (!max_format_) {
727    return;
728  }
729  std::vector<VideoFormat>::iterator iter = filtered_supported_formats_.begin();
730  while (iter != filtered_supported_formats_.end()) {
731    if (ShouldFilterFormat(*iter)) {
732      iter = filtered_supported_formats_.erase(iter);
733    } else {
734      ++iter;
735    }
736  }
737  if (filtered_supported_formats_.empty()) {
738    // The device only captures at resolutions higher than |max_format_| this
739    // indicates that |max_format_| should be ignored as it is better to capture
740    // at too high a resolution than to not capture at all.
741    filtered_supported_formats_ = supported_formats_;
742  }
743}
744
745bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
746  if (!enable_camera_list_) {
747    return false;
748  }
749  return format.width > max_format_->width ||
750         format.height > max_format_->height;
751}
752
753void VideoCapturer::UpdateStats(const CapturedFrame* captured_frame) {
754  // Update stats protected from fetches from different thread.
755  rtc::CritScope cs(&frame_stats_crit_);
756
757  last_captured_frame_format_.width = captured_frame->width;
758  last_captured_frame_format_.height = captured_frame->height;
759  // TODO(ronghuawu): Useful to report interval as well?
760  last_captured_frame_format_.interval = 0;
761  last_captured_frame_format_.fourcc = captured_frame->fourcc;
762
763  double time_now = frame_length_time_reporter_.TimerNow();
764  if (previous_frame_time_ != 0.0) {
765    adapt_frame_drops_data_.AddSample(adapt_frame_drops_);
766    effect_frame_drops_data_.AddSample(effect_frame_drops_);
767    frame_time_data_.AddSample(time_now - previous_frame_time_);
768  }
769  previous_frame_time_ = time_now;
770  effect_frame_drops_ = 0;
771  adapt_frame_drops_ = 0;
772}
773
774template<class T>
775void VideoCapturer::GetVariableSnapshot(
776    const rtc::RollingAccumulator<T>& data,
777    VariableInfo<T>* stats) {
778  stats->max_val = data.ComputeMax();
779  stats->mean = data.ComputeMean();
780  stats->min_val = data.ComputeMin();
781  stats->variance = data.ComputeVariance();
782}
783
784}  // namespace cricket
785