// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/browser/media/capture/desktop_capture_device.h"

#include <algorithm>

#include "base/bind.h"
8#include "base/location.h"
9#include "base/logging.h"
10#include "base/metrics/histogram.h"
11#include "base/strings/string_number_conversions.h"
12#include "base/synchronization/lock.h"
13#include "base/threading/thread.h"
14#include "base/timer/timer.h"
15#include "content/browser/media/capture/desktop_capture_device_uma_types.h"
16#include "content/public/browser/browser_thread.h"
17#include "content/public/browser/desktop_media_id.h"
18#include "content/public/browser/power_save_blocker.h"
19#include "media/base/video_util.h"
20#include "third_party/libyuv/include/libyuv/scale_argb.h"
21#include "third_party/webrtc/modules/desktop_capture/desktop_and_cursor_composer.h"
22#include "third_party/webrtc/modules/desktop_capture/desktop_capture_options.h"
23#include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h"
24#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
25#include "third_party/webrtc/modules/desktop_capture/mouse_cursor_monitor.h"
26#include "third_party/webrtc/modules/desktop_capture/screen_capturer.h"
27#include "third_party/webrtc/modules/desktop_capture/window_capturer.h"
28
29namespace content {
30
31namespace {

// Maximum CPU time percentage of a single core that can be consumed for
// desktop capturing. This means that on systems where screen scraping is slow
// we may need to capture at a frame rate lower than requested. This is
// necessary to keep the UI responsive.
const int kMaximumCpuConsumptionPercentage = 50;

webrtc::DesktopRect ComputeLetterboxRect(
    const webrtc::DesktopSize& max_size,
    const webrtc::DesktopSize& source_size) {
  gfx::Rect result = media::ComputeLetterboxRegion(
      gfx::Rect(0, 0, max_size.width(), max_size.height()),
      gfx::Size(source_size.width(), source_size.height()));
  return webrtc::DesktopRect::MakeLTRB(
      result.x(), result.y(), result.right(), result.bottom());
}

}  // namespace

class DesktopCaptureDevice::Core : public webrtc::DesktopCapturer::Callback {
 public:
  Core(scoped_refptr<base::SingleThreadTaskRunner> task_runner,
       scoped_ptr<webrtc::DesktopCapturer> capturer,
       DesktopMediaID::Type type);
  virtual ~Core();

  // Implementation of VideoCaptureDevice methods.
  void AllocateAndStart(const media::VideoCaptureParams& params,
                        scoped_ptr<Client> client);

  void SetNotificationWindowId(gfx::NativeViewId window_id);

 private:
  // webrtc::DesktopCapturer::Callback interface.
  virtual webrtc::SharedMemory* CreateSharedMemory(size_t size) OVERRIDE;
  virtual void OnCaptureCompleted(webrtc::DesktopFrame* frame) OVERRIDE;

  // Chooses new output properties based on the supplied source size and the
  // properties requested in AllocateAndStart(), and updates |capture_format_|
  // and |output_rect_| accordingly.
  void RefreshCaptureFormat(const webrtc::DesktopSize& frame_size);

  // Method that is scheduled on |task_runner_| to be called at regular
  // intervals to capture a frame.
  void OnCaptureTimer();

  // Captures a frame and schedules timer for the next one.
  void CaptureFrameAndScheduleNext();

  // Captures a single frame.
  void DoCapture();

  // Task runner used for capturing operations.
  scoped_refptr<base::SingleThreadTaskRunner> task_runner_;

  // The underlying DesktopCapturer instance used to capture frames.
  scoped_ptr<webrtc::DesktopCapturer> desktop_capturer_;

  // The device client which proxies device events to the controller. Accessed
  // on the task_runner_ thread.
  scoped_ptr<Client> client_;

  // Requested video capture format (width, height, frame rate, etc).
  media::VideoCaptureParams requested_params_;

  // Actual video capture format being generated.
  media::VideoCaptureFormat capture_format_;

  // Size of frame most recently captured from the source.
  webrtc::DesktopSize previous_frame_size_;

  // DesktopFrame into which captured frames are down-scaled and/or letterboxed,
  // depending upon the caller's requested capture capabilities. If frames can
  // be returned to the caller directly then this is NULL.
  scoped_ptr<webrtc::DesktopFrame> output_frame_;

  // Sub-rectangle of |output_frame_| into which the source will be scaled
  // and/or letterboxed.
  webrtc::DesktopRect output_rect_;

  // Timer used to capture the frame.
  base::OneShotTimer<Core> capture_timer_;

  // True when waiting for |desktop_capturer_| to capture current frame.
  bool capture_in_progress_;

  // True if the first capture call has returned. Used to log the first capture
  // result.
  bool first_capture_returned_;

  // The type of the capturer.
  DesktopMediaID::Type capturer_type_;

  scoped_ptr<webrtc::BasicDesktopFrame> black_frame_;

  // TODO(jiayl): Remove power_save_blocker_ when there is an API to keep the
  // screen from sleeping for the drive-by web.
  scoped_ptr<PowerSaveBlocker> power_save_blocker_;

  DISALLOW_COPY_AND_ASSIGN(Core);
};

DesktopCaptureDevice::Core::Core(
    scoped_refptr<base::SingleThreadTaskRunner> task_runner,
    scoped_ptr<webrtc::DesktopCapturer> capturer,
    DesktopMediaID::Type type)
    : task_runner_(task_runner),
      desktop_capturer_(capturer.Pass()),
      capture_in_progress_(false),
      first_capture_returned_(false),
      capturer_type_(type) {
}

DesktopCaptureDevice::Core::~Core() {
  DCHECK(task_runner_->BelongsToCurrentThread());
  client_.reset();
  output_frame_.reset();
  previous_frame_size_.set(0, 0);
  desktop_capturer_.reset();
}

void DesktopCaptureDevice::Core::AllocateAndStart(
    const media::VideoCaptureParams& params,
    scoped_ptr<Client> client) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DCHECK_GT(params.requested_format.frame_size.GetArea(), 0);
  DCHECK_GT(params.requested_format.frame_rate, 0);
  DCHECK(desktop_capturer_);
  DCHECK(client.get());
  DCHECK(!client_.get());

  client_ = client.Pass();
  requested_params_ = params;

  capture_format_ = requested_params_.requested_format;

  // This capturer always outputs ARGB, non-interlaced.
  capture_format_.pixel_format = media::PIXEL_FORMAT_ARGB;

  power_save_blocker_.reset(PowerSaveBlocker::Create(
      PowerSaveBlocker::kPowerSaveBlockPreventDisplaySleep,
      "DesktopCaptureDevice is running").release());

  desktop_capturer_->Start(this);
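  // |this| is now registered as the capturer's callback, so each Capture()
  // call below will deliver its frame to OnCaptureCompleted().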

  CaptureFrameAndScheduleNext();
}

void DesktopCaptureDevice::Core::SetNotificationWindowId(
    gfx::NativeViewId window_id) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DCHECK(window_id);
  desktop_capturer_->SetExcludedWindow(window_id);
}

webrtc::SharedMemory*
DesktopCaptureDevice::Core::CreateSharedMemory(size_t size) {
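  // No shared memory is provided to the capturer; returning NULL makes it
  // fall back to allocating frames in regular heap memory.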
  return NULL;
}

void DesktopCaptureDevice::Core::OnCaptureCompleted(
    webrtc::DesktopFrame* frame) {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DCHECK(capture_in_progress_);

  if (!first_capture_returned_) {
    first_capture_returned_ = true;
    if (capturer_type_ == DesktopMediaID::TYPE_SCREEN) {
      IncrementDesktopCaptureCounter(frame ? FIRST_SCREEN_CAPTURE_SUCCEEDED
                                           : FIRST_SCREEN_CAPTURE_FAILED);
    } else {
      IncrementDesktopCaptureCounter(frame ? FIRST_WINDOW_CAPTURE_SUCCEEDED
                                           : FIRST_WINDOW_CAPTURE_FAILED);
    }
  }

  capture_in_progress_ = false;

  if (!frame) {
    std::string log("Failed to capture a frame.");
    LOG(ERROR) << log;
    client_->OnError(log);
    return;
  }

  if (!client_)
    return;

  base::TimeDelta capture_time(
      base::TimeDelta::FromMilliseconds(frame->capture_time_ms()));

  // The two UMA_HISTOGRAM_TIMES calls below must be in separate code paths
  // since each creates a static variable that expects a constant histogram
  // name.
  if (capturer_type_ == DesktopMediaID::TYPE_SCREEN) {
    UMA_HISTOGRAM_TIMES(kUmaScreenCaptureTime, capture_time);
  } else {
    UMA_HISTOGRAM_TIMES(kUmaWindowCaptureTime, capture_time);
  }

  scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);

  // On OS X we receive a 1x1 frame when the shared window is minimized. It
  // cannot be subsampled to I420 and will be dropped downstream. So we replace
  // it with a black frame to avoid the video appearing frozen at the last
  // frame.
  if (frame->size().width() == 1 || frame->size().height() == 1) {
    if (!black_frame_.get()) {
      black_frame_.reset(
          new webrtc::BasicDesktopFrame(
              webrtc::DesktopSize(capture_format_.frame_size.width(),
                                  capture_format_.frame_size.height())));
      memset(black_frame_->data(),
             0,
             black_frame_->stride() * black_frame_->size().height());
    }
    owned_frame.reset();
    frame = black_frame_.get();
  }

  // Handle initial frame size and size changes.
  RefreshCaptureFormat(frame->size());

  webrtc::DesktopSize output_size(capture_format_.frame_size.width(),
                                  capture_format_.frame_size.height());
  size_t output_bytes = output_size.width() * output_size.height() *
      webrtc::DesktopFrame::kBytesPerPixel;
  const uint8_t* output_data = NULL;
  scoped_ptr<uint8_t[]> flipped_frame_buffer;

  if (frame->size().equals(output_size)) {
    // If the captured frame matches the output size, we can return the pixel
    // data directly, without scaling.
    output_data = frame->data();

    // If the |frame| generated by the screen capturer is inverted (indicated
    // by a negative stride) then we need to flip it vertically. This happens
    // only on a specific platform; see crbug.com/306876.
    if (frame->stride() < 0) {
      int height = frame->size().height();
      int bytes_per_row =
          frame->size().width() * webrtc::DesktopFrame::kBytesPerPixel;
      flipped_frame_buffer.reset(new uint8_t[output_bytes]);
      uint8_t* dest = flipped_frame_buffer.get();
      for (int row = 0; row < height; ++row) {
        memcpy(dest, output_data, bytes_per_row);
        dest += bytes_per_row;
        output_data += frame->stride();
      }
      output_data = flipped_frame_buffer.get();
    }
  } else {
    // Otherwise we need to down-scale and/or letterbox to the target format.

    // Allocate a buffer of the correct size to scale the frame into.
    // |output_frame_| is cleared whenever |output_rect_| changes, so we don't
    // need to worry about clearing out stale pixel data in letterboxed areas.
    if (!output_frame_) {
      output_frame_.reset(new webrtc::BasicDesktopFrame(output_size));
      memset(output_frame_->data(), 0, output_bytes);
    }
    DCHECK(output_frame_->size().equals(output_size));

    // TODO(wez): Optimize this to scale only changed portions of the output,
    // using ARGBScaleClip().
    uint8_t* output_rect_data = output_frame_->data() +
        output_frame_->stride() * output_rect_.top() +
        webrtc::DesktopFrame::kBytesPerPixel * output_rect_.left();
    libyuv::ARGBScale(frame->data(), frame->stride(),
                      frame->size().width(), frame->size().height(),
                      output_rect_data, output_frame_->stride(),
                      output_rect_.width(), output_rect_.height(),
                      libyuv::kFilterBilinear);
    output_data = output_frame_->data();
  }

  client_->OnIncomingCapturedData(
      output_data, output_bytes, capture_format_, 0, base::TimeTicks::Now());
}

void DesktopCaptureDevice::Core::RefreshCaptureFormat(
    const webrtc::DesktopSize& frame_size) {
  if (previous_frame_size_.equals(frame_size))
    return;

  // Clear the output frame, if any, since it will either need resizing, or
  // clearing of stale data in letterbox areas, anyway.
  output_frame_.reset();

  if (previous_frame_size_.is_empty() ||
      requested_params_.resolution_change_policy ==
      media::RESOLUTION_POLICY_DYNAMIC_WITHIN_LIMIT) {
    // If this is the first frame, or the receiver supports variable resolution
    // then determine the output size by treating the requested width & height
    // as maxima.
    if (frame_size.width() >
            requested_params_.requested_format.frame_size.width() ||
        frame_size.height() >
            requested_params_.requested_format.frame_size.height()) {
      output_rect_ = ComputeLetterboxRect(
          webrtc::DesktopSize(
              requested_params_.requested_format.frame_size.width(),
              requested_params_.requested_format.frame_size.height()),
          frame_size);
      output_rect_.Translate(-output_rect_.left(), -output_rect_.top());
    } else {
      output_rect_ = webrtc::DesktopRect::MakeSize(frame_size);
    }
    capture_format_.frame_size.SetSize(output_rect_.width(),
                                       output_rect_.height());
  } else {
    // Otherwise the output frame size cannot change, so just scale and
    // letterbox.
    output_rect_ = ComputeLetterboxRect(
        webrtc::DesktopSize(capture_format_.frame_size.width(),
                            capture_format_.frame_size.height()),
        frame_size);
  }

  previous_frame_size_ = frame_size;
}

void DesktopCaptureDevice::Core::OnCaptureTimer() {
  DCHECK(task_runner_->BelongsToCurrentThread());

  if (!client_)
    return;

  CaptureFrameAndScheduleNext();
}

void DesktopCaptureDevice::Core::CaptureFrameAndScheduleNext() {
  DCHECK(task_runner_->BelongsToCurrentThread());

  base::TimeTicks started_time = base::TimeTicks::Now();
  DoCapture();
  base::TimeDelta last_capture_duration = base::TimeTicks::Now() - started_time;

  // Limit frame-rate to reduce CPU consumption.
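  // The capture period is the larger of (a) the shortest period that keeps
  // capturing below kMaximumCpuConsumptionPercentage of one core, given how
  // long the last capture took, and (b) the period implied by the requested
  // frame rate.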
  base::TimeDelta capture_period = std::max(
      (last_capture_duration * 100) / kMaximumCpuConsumptionPercentage,
      base::TimeDelta::FromSeconds(1) / capture_format_.frame_rate);

  // Schedule a task for the next frame.
  capture_timer_.Start(FROM_HERE, capture_period - last_capture_duration,
                       this, &Core::OnCaptureTimer);
}

void DesktopCaptureDevice::Core::DoCapture() {
  DCHECK(task_runner_->BelongsToCurrentThread());
  DCHECK(!capture_in_progress_);

  capture_in_progress_ = true;
  desktop_capturer_->Capture(webrtc::DesktopRegion());

  // Currently only synchronous implementations of DesktopCapturer are
  // supported.
  DCHECK(!capture_in_progress_);
}

// static
scoped_ptr<media::VideoCaptureDevice> DesktopCaptureDevice::Create(
    const DesktopMediaID& source) {
  webrtc::DesktopCaptureOptions options =
      webrtc::DesktopCaptureOptions::CreateDefault();
  // Leave desktop effects enabled during WebRTC captures.
  options.set_disable_effects(false);

  scoped_ptr<webrtc::DesktopCapturer> capturer;

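  // Whichever capturer is created below is wrapped in a
  // DesktopAndCursorComposer so that the mouse cursor is composited onto the
  // captured frames.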
  switch (source.type) {
    case DesktopMediaID::TYPE_SCREEN: {
#if defined(OS_WIN)
      options.set_allow_use_magnification_api(true);
#endif
      scoped_ptr<webrtc::ScreenCapturer> screen_capturer(
          webrtc::ScreenCapturer::Create(options));
      if (screen_capturer && screen_capturer->SelectScreen(source.id)) {
        capturer.reset(new webrtc::DesktopAndCursorComposer(
            screen_capturer.release(),
            webrtc::MouseCursorMonitor::CreateForScreen(options, source.id)));
        IncrementDesktopCaptureCounter(SCREEN_CAPTURER_CREATED);
      }
      break;
    }

    case DesktopMediaID::TYPE_WINDOW: {
      scoped_ptr<webrtc::WindowCapturer> window_capturer(
          webrtc::WindowCapturer::Create(options));
      if (window_capturer && window_capturer->SelectWindow(source.id)) {
        window_capturer->BringSelectedWindowToFront();
        capturer.reset(new webrtc::DesktopAndCursorComposer(
            window_capturer.release(),
            webrtc::MouseCursorMonitor::CreateForWindow(options, source.id)));
        IncrementDesktopCaptureCounter(WINDOW_CAPTURER_CREATED);
      }
      break;
    }

    default: {
      NOTREACHED();
    }
  }

  scoped_ptr<media::VideoCaptureDevice> result;
  if (capturer)
    result.reset(new DesktopCaptureDevice(capturer.Pass(), source.type));

  return result.Pass();
}

DesktopCaptureDevice::~DesktopCaptureDevice() {
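  // StopAndDeAllocate() must have been called first; it releases |core_| and
  // stops the capture thread.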
  DCHECK(!core_);
}

void DesktopCaptureDevice::AllocateAndStart(
    const media::VideoCaptureParams& params,
    scoped_ptr<Client> client) {
  thread_.message_loop_proxy()->PostTask(
      FROM_HERE,
      base::Bind(&Core::AllocateAndStart, base::Unretained(core_.get()), params,
                 base::Passed(&client)));
}

void DesktopCaptureDevice::StopAndDeAllocate() {
  if (core_) {
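    // Delete |core_| on the capture thread, after any capture tasks already
    // queued there have run, and then join the thread.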
    thread_.message_loop_proxy()->DeleteSoon(FROM_HERE, core_.release());
    thread_.Stop();
  }
}

void DesktopCaptureDevice::SetNotificationWindowId(
    gfx::NativeViewId window_id) {
  thread_.message_loop_proxy()->PostTask(
      FROM_HERE,
      base::Bind(&Core::SetNotificationWindowId, base::Unretained(core_.get()),
                 window_id));
}

DesktopCaptureDevice::DesktopCaptureDevice(
    scoped_ptr<webrtc::DesktopCapturer> capturer,
    DesktopMediaID::Type type)
    : thread_("desktopCaptureThread") {
#if defined(OS_WIN)
  // On Windows the thread must be a UI thread.
  base::MessageLoop::Type thread_type = base::MessageLoop::TYPE_UI;
#else
  base::MessageLoop::Type thread_type = base::MessageLoop::TYPE_DEFAULT;
#endif

  thread_.StartWithOptions(base::Thread::Options(thread_type, 0));

  core_.reset(new Core(thread_.message_loop_proxy(), capturer.Pass(), type));
}

}  // namespace content