/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/video_engine/vie_capturer.h"

#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/utility/interface/process_thread.h"
#include "webrtc/modules/video_capture/include/video_capture_factory.h"
#include "webrtc/modules/video_processing/main/interface/video_processing.h"
#include "webrtc/modules/video_render/include/video_render_defines.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
#include "webrtc/video_engine/include/vie_image_process.h"
#include "webrtc/video_engine/overuse_frame_detector.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/video_engine/vie_encoder.h"

namespace webrtc {

const int kThreadWaitTimeMs = 100;

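// Creates and immediately starts the dedicated capture thread and registers
// the CPU overuse detector with the module process thread.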
ViECapturer::ViECapturer(int capture_id,
                         int engine_id,
                         const Config& config,
                         ProcessThread& module_process_thread)
    : ViEFrameProviderBase(capture_id, engine_id),
      capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      capture_module_(NULL),
      external_capture_module_(NULL),
      module_process_thread_(module_process_thread),
      capture_id_(capture_id),
      incoming_frame_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
                                                   this, kHighPriority,
                                                   "ViECaptureThread")),
      capture_event_(*EventWrapper::Create()),
      deliver_event_(*EventWrapper::Create()),
      effect_filter_(NULL),
      image_proc_module_(NULL),
      image_proc_module_ref_counter_(0),
      deflicker_frame_stats_(NULL),
      brightness_frame_stats_(NULL),
      current_brightness_level_(Normal),
      reported_brightness_level_(Normal),
      observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
      observer_(NULL),
      overuse_detector_(new OveruseFrameDetector(Clock::GetRealTimeClock())) {
  unsigned int t_id = 0;
  if (!capture_thread_.Start(t_id)) {
    assert(false);
  }
  module_process_thread_.RegisterModule(overuse_detector_.get());
}

ViECapturer::~ViECapturer() {
  module_process_thread_.DeRegisterModule(overuse_detector_.get());

  // Stop the thread.
  deliver_cs_->Enter();
  capture_cs_->Enter();
  capture_thread_.SetNotAlive();
  capture_event_.Set();
  capture_cs_->Leave();
  deliver_cs_->Leave();

  // Stop the camera input.
  if (capture_module_) {
    module_process_thread_.DeRegisterModule(capture_module_);
    capture_module_->DeRegisterCaptureDataCallback();
    capture_module_->Release();
    capture_module_ = NULL;
  }
  if (capture_thread_.Stop()) {
    // Thread stopped.
    delete &capture_thread_;
    delete &capture_event_;
    delete &deliver_event_;
  } else {
    assert(false);
  }

  if (image_proc_module_) {
    VideoProcessingModule::Destroy(image_proc_module_);
  }
  if (deflicker_frame_stats_) {
    delete deflicker_frame_stats_;
    deflicker_frame_stats_ = NULL;
  }
  delete brightness_frame_stats_;
}

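// Static factory wrapping an already created VideoCaptureModule. Returns NULL
// if initialization fails.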
ViECapturer* ViECapturer::CreateViECapture(
    int capture_id,
    int engine_id,
    const Config& config,
    VideoCaptureModule* capture_module,
    ProcessThread& module_process_thread) {
  ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
                                         module_process_thread);
  if (!capture || capture->Init(capture_module) != 0) {
    delete capture;
    capture = NULL;
  }
  return capture;
}

int32_t ViECapturer::Init(VideoCaptureModule* capture_module) {
  assert(capture_module_ == NULL);
  capture_module_ = capture_module;
  capture_module_->RegisterCaptureDataCallback(*this);
  capture_module_->AddRef();
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}

ViECapturer* ViECapturer::CreateViECapture(
    int capture_id,
    int engine_id,
    const Config& config,
    const char* device_unique_idUTF8,
    const uint32_t device_unique_idUTF8Length,
    ProcessThread& module_process_thread) {
  ViECapturer* capture = new ViECapturer(capture_id, engine_id, config,
                                         module_process_thread);
  if (!capture ||
      capture->Init(device_unique_idUTF8, device_unique_idUTF8Length) != 0) {
    delete capture;
    capture = NULL;
  }
  return capture;
}

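// Creates and registers the capture module. A NULL device_unique_idUTF8
// creates an external capture module that the application feeds through
// IncomingFrame() and IncomingFrameI420().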
int32_t ViECapturer::Init(const char* device_unique_idUTF8,
                          uint32_t device_unique_idUTF8Length) {
  assert(capture_module_ == NULL);
  if (device_unique_idUTF8 == NULL) {
    capture_module_ = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), external_capture_module_);
  } else {
    capture_module_ = VideoCaptureFactory::Create(
        ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
  }
  if (!capture_module_) {
    return -1;
  }
  capture_module_->AddRef();
  capture_module_->RegisterCaptureDataCallback(*this);
  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
    return -1;
  }

  return 0;
}

int ViECapturer::FrameCallbackChanged() {
  if (Started() && !CaptureCapabilityFixed()) {
    // Reconfigure the camera if a new size is required and the capture device
    // does not provide encoded frames.
    int best_width;
    int best_height;
    int best_frame_rate;
    VideoCaptureCapability capture_settings;
    capture_module_->CaptureSettings(capture_settings);
    GetBestFormat(&best_width, &best_height, &best_frame_rate);
    if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
      if (best_width != capture_settings.width ||
          best_height != capture_settings.height ||
          best_frame_rate != capture_settings.maxFPS ||
          capture_settings.codecType != kVideoCodecUnknown) {
        Stop();
        Start(requested_capability_);
      }
    }
  }
  return 0;
}

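// Starts capturing. If no fixed capability was requested, the preferred
// format is taken from the registered frame callbacks, with the ViE defaults
// as fallback.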
int32_t ViECapturer::Start(const CaptureCapability& capture_capability) {
  int width;
  int height;
  int frame_rate;
  VideoCaptureCapability capability;
  requested_capability_ = capture_capability;

  if (!CaptureCapabilityFixed()) {
    // Ask the observers for best size.
    GetBestFormat(&width, &height, &frame_rate);
    if (width == 0) {
      width = kViECaptureDefaultWidth;
    }
    if (height == 0) {
      height = kViECaptureDefaultHeight;
    }
    if (frame_rate == 0) {
      frame_rate = kViECaptureDefaultFramerate;
    }
    capability.height = height;
    capability.width = width;
    capability.maxFPS = frame_rate;
    capability.rawType = kVideoI420;
    capability.codecType = kVideoCodecUnknown;
  } else {
    // Width, height and type specified with call to Start, not set by
    // observers.
    capability.width = requested_capability_.width;
    capability.height = requested_capability_.height;
    capability.maxFPS = requested_capability_.maxFPS;
    capability.rawType = requested_capability_.rawType;
    capability.interlaced = requested_capability_.interlaced;
  }
  return capture_module_->StartCapture(capability);
}

int32_t ViECapturer::Stop() {
  requested_capability_ = CaptureCapability();
  return capture_module_->StopCapture();
}

bool ViECapturer::Started() {
  return capture_module_->CaptureStarted();
}

const char* ViECapturer::CurrentDeviceName() const {
  return capture_module_->CurrentDeviceName();
}

void ViECapturer::RegisterCpuOveruseObserver(CpuOveruseObserver* observer) {
  overuse_detector_->SetObserver(observer);
}

void ViECapturer::SetCpuOveruseOptions(const CpuOveruseOptions& options) {
  overuse_detector_->SetOptions(options);
}

void ViECapturer::GetCpuOveruseMetrics(CpuOveruseMetrics* metrics) const {
  overuse_detector_->GetCpuOveruseMetrics(metrics);
}

int32_t ViECapturer::SetCaptureDelay(int32_t delay_ms) {
  capture_module_->SetCaptureDelay(delay_ms);
  return 0;
}

int32_t ViECapturer::SetRotateCapturedFrames(
    const RotateCapturedFrame rotation) {
  VideoCaptureRotation converted_rotation = kCameraRotate0;
  switch (rotation) {
    case RotateCapturedFrame_0:
      converted_rotation = kCameraRotate0;
      break;
    case RotateCapturedFrame_90:
      converted_rotation = kCameraRotate90;
      break;
    case RotateCapturedFrame_180:
      converted_rotation = kCameraRotate180;
      break;
    case RotateCapturedFrame_270:
      converted_rotation = kCameraRotate270;
      break;
  }
  return capture_module_->SetCaptureRotation(converted_rotation);
}

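// Injects an externally captured raw frame into the external capture module.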
int ViECapturer::IncomingFrame(unsigned char* video_frame,
                               unsigned int video_frame_length,
                               uint16_t width,
                               uint16_t height,
                               RawVideoType video_type,
                               unsigned long long capture_time) {  // NOLINT
  if (!external_capture_module_) {
    return -1;
  }
  VideoCaptureCapability capability;
  capability.width = width;
  capability.height = height;
  capability.rawType = video_type;
  return external_capture_module_->IncomingFrame(video_frame,
                                                 video_frame_length,
                                                 capability, capture_time);
}

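// Copies externally provided I420 planes into incoming_frame_ and forwards
// the frame to the external capture module.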
int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
                                   unsigned long long capture_time) {  // NOLINT
  if (!external_capture_module_) {
    return -1;
  }

  int size_y = video_frame.height * video_frame.y_pitch;
  int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2);
  int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2);
  CriticalSectionScoped cs(incoming_frame_cs_.get());
  int ret = incoming_frame_.CreateFrame(size_y,
                                        video_frame.y_plane,
                                        size_u,
                                        video_frame.u_plane,
                                        size_v,
                                        video_frame.v_plane,
                                        video_frame.width,
                                        video_frame.height,
                                        video_frame.y_pitch,
                                        video_frame.u_pitch,
                                        video_frame.v_pitch);

  if (ret < 0) {
    LOG_F(LS_ERROR) << "Could not create I420Frame.";
    return -1;
  }

  return external_capture_module_->IncomingI420VideoFrame(&incoming_frame_,
                                                          capture_time);
}

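// Forwards an externally captured frame to the external capture module, then
// resets the timestamps on the caller's frame.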
void ViECapturer::SwapFrame(I420VideoFrame* frame) {
  external_capture_module_->IncomingI420VideoFrame(frame,
                                                   frame->render_time_ms());
  frame->set_timestamp(0);
  frame->set_ntp_time_ms(0);
  frame->set_render_time_ms(0);
}

void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
                                          I420VideoFrame& video_frame) {
  CriticalSectionScoped cs(capture_cs_.get());
  // Render this frame earlier: the render time is set when the frame is
  // received from the camera, not when the camera actually captured it, so
  // compensate by the measured frame delay.
  video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());

  TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                           "render_time", video_frame.render_time_ms());

  if (video_frame.native_handle() != NULL) {
    captured_frame_.reset(video_frame.CloneFrame());
  } else {
    if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
      captured_frame_.reset(new I420VideoFrame());
    captured_frame_->SwapFrame(&video_frame);
  }
  capture_event_.Set();
  overuse_detector_->FrameCaptured(captured_frame_->width(),
                                   captured_frame_->height());
}

void ViECapturer::OnCaptureDelayChanged(const int32_t id,
                                        const int32_t delay) {
  LOG(LS_INFO) << "Capture delay changed to " << delay
               << " for device " << id;

  // Deliver the capture delay to all registered callbacks.
  ViEFrameProviderBase::SetFrameDelay(delay);
}

369
370int32_t ViECapturer::RegisterEffectFilter(
371    ViEEffectFilter* effect_filter) {
372  CriticalSectionScoped cs(deliver_cs_.get());
373
374  if (effect_filter != NULL && effect_filter_ != NULL) {
375    LOG_F(LS_ERROR) << "Effect filter already registered.";
376    return -1;
377  }
378  effect_filter_ = effect_filter;
379  return 0;
380}
381
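// Lazily creates the VideoProcessingModule shared by deflickering and the
// brightness alarm. The module is reference counted and destroyed when the
// last user disables its feature.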
int32_t ViECapturer::IncImageProcRefCount() {
  if (!image_proc_module_) {
    assert(image_proc_module_ref_counter_ == 0);
    image_proc_module_ = VideoProcessingModule::Create(
        ViEModuleId(engine_id_, capture_id_));
    if (!image_proc_module_) {
      LOG_F(LS_ERROR) << "Could not create video processing module.";
      return -1;
    }
  }
  image_proc_module_ref_counter_++;
  return 0;
}

int32_t ViECapturer::DecImageProcRefCount() {
  image_proc_module_ref_counter_--;
  if (image_proc_module_ref_counter_ == 0) {
    // Destroy module.
    VideoProcessingModule::Destroy(image_proc_module_);
    image_proc_module_ = NULL;
  }
  return 0;
}

int32_t ViECapturer::EnableDeflickering(bool enable) {
  CriticalSectionScoped cs(deliver_cs_.get());
  if (enable) {
    if (deflicker_frame_stats_) {
      return -1;
    }
    if (IncImageProcRefCount() != 0) {
      return -1;
    }
    deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
  } else {
    if (deflicker_frame_stats_ == NULL) {
      return -1;
    }
    DecImageProcRefCount();
    delete deflicker_frame_stats_;
    deflicker_frame_stats_ = NULL;
  }
  return 0;
}

int32_t ViECapturer::EnableBrightnessAlarm(bool enable) {
  CriticalSectionScoped cs(deliver_cs_.get());
  if (enable) {
    if (brightness_frame_stats_) {
      return -1;
    }
    if (IncImageProcRefCount() != 0) {
      return -1;
    }
    brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
  } else {
    // Only release the shared processing module if the alarm was enabled.
    if (brightness_frame_stats_ == NULL) {
      return -1;
    }
    DecImageProcRefCount();
    delete brightness_frame_stats_;
    brightness_frame_stats_ = NULL;
  }
  return 0;
}

bool ViECapturer::ViECaptureThreadFunction(void* obj) {
  return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
}

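// Capture thread loop: waits for a captured frame, swaps it into
// deliver_frame_, delivers it, reports brightness alarm changes and feeds the
// frame processing time to the overuse detector.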
bool ViECapturer::ViECaptureProcess() {
  if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
    overuse_detector_->FrameProcessingStarted();
    int64_t encode_start_time = -1;
    deliver_cs_->Enter();
    if (SwapCapturedAndDeliverFrameIfAvailable()) {
      encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
      DeliverI420Frame(deliver_frame_.get());
      if (deliver_frame_->native_handle() != NULL)
        deliver_frame_.reset();  // Release the texture so it can be reused.
    }
    deliver_cs_->Leave();
    if (current_brightness_level_ != reported_brightness_level_) {
      CriticalSectionScoped cs(observer_cs_.get());
      if (observer_) {
        observer_->BrightnessAlarm(id_, current_brightness_level_);
        reported_brightness_level_ = current_brightness_level_;
      }
    }
    // Update the overuse detector with the duration.
    if (encode_start_time != -1) {
      overuse_detector_->FrameEncoded(
          Clock::GetRealTimeClock()->TimeInMilliseconds() - encode_start_time);
    }
  }
  // We're done!
  return true;
}

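// Runs optional deflickering, brightness detection and the effect filter on
// the frame and then delivers it to all registered callbacks. Texture frames
// are delivered without processing.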
void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
  if (video_frame->native_handle() != NULL) {
    ViEFrameProviderBase::DeliverFrame(video_frame);
    return;
  }

  // Apply image enhancement and effect filter.
  if (deflicker_frame_stats_) {
    if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
                                          *video_frame) == 0) {
      image_proc_module_->Deflickering(video_frame, deflicker_frame_stats_);
    } else {
      LOG_F(LS_ERROR) << "Could not get frame stats.";
    }
  }
  if (brightness_frame_stats_) {
    if (image_proc_module_->GetFrameStats(brightness_frame_stats_,
                                          *video_frame) == 0) {
      int32_t brightness = image_proc_module_->BrightnessDetection(
          *video_frame, *brightness_frame_stats_);

      switch (brightness) {
      case VideoProcessingModule::kNoWarning:
        current_brightness_level_ = Normal;
        break;
      case VideoProcessingModule::kDarkWarning:
        current_brightness_level_ = Dark;
        break;
      case VideoProcessingModule::kBrightWarning:
        current_brightness_level_ = Bright;
        break;
      default:
        break;
      }
    }
  }
  if (effect_filter_) {
    unsigned int length = CalcBufferSize(kI420,
                                         video_frame->width(),
                                         video_frame->height());
    scoped_ptr<uint8_t[]> video_buffer(new uint8_t[length]);
    ExtractBuffer(*video_frame, length, video_buffer.get());
    effect_filter_->Transform(length,
                              video_buffer.get(),
                              video_frame->ntp_time_ms(),
                              video_frame->timestamp(),
                              video_frame->width(),
                              video_frame->height());
  }
  // Deliver the captured frame to all observers (channels, renderer or file).
  ViEFrameProviderBase::DeliverFrame(video_frame);
}

int ViECapturer::DeregisterFrameCallback(
    const ViEFrameCallback* callbackObject) {
  return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
}

bool ViECapturer::IsFrameCallbackRegistered(
    const ViEFrameCallback* callbackObject) {
  CriticalSectionScoped cs(provider_cs_.get());
  return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
}

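// True if Start() was called with an explicit width, height and frame rate.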
bool ViECapturer::CaptureCapabilityFixed() {
  return requested_capability_.width != 0 &&
      requested_capability_.height != 0 &&
      requested_capability_.maxFPS != 0;
}

int32_t ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
  {
    CriticalSectionScoped cs(observer_cs_.get());
    if (observer_) {
      LOG_F(LS_ERROR) << "Observer already registered.";
      return -1;
    }
    observer_ = observer;
  }
  capture_module_->RegisterCaptureCallback(*this);
  capture_module_->EnableFrameRateCallback(true);
  capture_module_->EnableNoPictureAlarm(true);
  return 0;
}

int32_t ViECapturer::DeRegisterObserver() {
  capture_module_->EnableFrameRateCallback(false);
  capture_module_->EnableNoPictureAlarm(false);
  capture_module_->DeRegisterCaptureCallback();

  CriticalSectionScoped cs(observer_cs_.get());
  observer_ = NULL;
  return 0;
}

bool ViECapturer::IsObserverRegistered() {
  CriticalSectionScoped cs(observer_cs_.get());
  return observer_ != NULL;
}

void ViECapturer::OnCaptureFrameRate(const int32_t id,
                                     const uint32_t frame_rate) {
  CriticalSectionScoped cs(observer_cs_.get());
  observer_->CapturedFrameRate(id_, static_cast<uint8_t>(frame_rate));
}

void ViECapturer::OnNoPictureAlarm(const int32_t id,
                                   const VideoCaptureAlarm alarm) {
  LOG(LS_WARNING) << "OnNoPictureAlarm " << id;

  CriticalSectionScoped cs(observer_cs_.get());
  CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
  observer_->NoPictureAlarm(id, vie_alarm);
}

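// Moves the most recently captured frame into deliver_frame_. Returns true if
// a new frame is ready for delivery.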
bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
  CriticalSectionScoped cs(capture_cs_.get());
  if (captured_frame_ == NULL)
    return false;

  if (captured_frame_->native_handle() != NULL) {
    deliver_frame_.reset(captured_frame_.release());
    return true;
  }

  if (captured_frame_->IsZeroSize())
    return false;

  if (deliver_frame_ == NULL)
    deliver_frame_.reset(new I420VideoFrame());
  deliver_frame_->SwapFrame(captured_frame_.get());
  captured_frame_->ResetSize();
  return true;
}

}  // namespace webrtc