1// Copyright (c) 2012 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "content/renderer/media/media_stream_dependency_factory.h"
6
7#include <vector>
8
9#include "base/command_line.h"
10#include "base/strings/utf_string_conversions.h"
11#include "base/synchronization/waitable_event.h"
12#include "content/public/common/content_switches.h"
13#include "content/renderer/media/media_stream_source_extra_data.h"
14#include "content/renderer/media/peer_connection_identity_service.h"
15#include "content/renderer/media/rtc_media_constraints.h"
16#include "content/renderer/media/rtc_peer_connection_handler.h"
17#include "content/renderer/media/rtc_video_capturer.h"
18#include "content/renderer/media/rtc_video_decoder_factory.h"
19#include "content/renderer/media/video_capture_impl_manager.h"
20#include "content/renderer/media/webaudio_capturer_source.h"
21#include "content/renderer/media/webrtc_audio_device_impl.h"
22#include "content/renderer/media/webrtc_local_audio_track.h"
23#include "content/renderer/media/webrtc_logging_initializer.h"
24#include "content/renderer/media/webrtc_uma_histograms.h"
25#include "content/renderer/p2p/ipc_network_manager.h"
26#include "content/renderer/p2p/ipc_socket_factory.h"
27#include "content/renderer/p2p/port_allocator.h"
28#include "content/renderer/render_thread_impl.h"
29#include "jingle/glue/thread_wrapper.h"
30#include "media/filters/gpu_video_decoder_factories.h"
31#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
32#include "third_party/WebKit/public/platform/WebMediaStream.h"
33#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
34#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
35#include "third_party/WebKit/public/platform/WebURL.h"
36#include "third_party/WebKit/public/web/WebDocument.h"
37#include "third_party/WebKit/public/web/WebFrame.h"
38#include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"
39
40#if defined(USE_OPENSSL)
41#include "third_party/libjingle/source/talk/base/ssladapter.h"
42#else
43#include "net/socket/nss_ssl_util.h"
44#endif
45
46#if defined(GOOGLE_TV)
47#include "content/renderer/media/rtc_video_decoder_factory_tv.h"
48#endif
49
50namespace content {
51
52// The constraint key for the PeerConnection constructor for enabling diagnostic
53// WebRTC logging. It's a Google specific key, hence the "goog" prefix.
54const char kWebRtcLoggingConstraint[] = "googLog";
55
56// Constant constraint keys which disables all audio constraints.
57// Only used in combination with WebAudio sources.
58struct {
59  const char* key;
60  const char* value;
61} const kWebAudioConstraints[] = {
62    {webrtc::MediaConstraintsInterface::kEchoCancellation,
63     webrtc::MediaConstraintsInterface::kValueTrue},
64    {webrtc::MediaConstraintsInterface::kAutoGainControl,
65     webrtc::MediaConstraintsInterface::kValueTrue},
66    {webrtc::MediaConstraintsInterface::kNoiseSuppression,
67     webrtc::MediaConstraintsInterface::kValueTrue},
68    {webrtc::MediaConstraintsInterface::kHighpassFilter,
69     webrtc::MediaConstraintsInterface::kValueTrue},
70};
71
72class WebAudioConstraints : public RTCMediaConstraints {
73 public:
74  WebAudioConstraints()
75      : RTCMediaConstraints(WebKit::WebMediaConstraints()) {
76    for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kWebAudioConstraints); ++i) {
77      webrtc::MediaConstraintsInterface::Constraint constraint;
78      constraint.key = kWebAudioConstraints[i].key;
79      constraint.value = kWebAudioConstraints[i].value;
80
81      DVLOG(1) << "WebAudioConstraints: " << constraint.key
82               << " : " <<  constraint.value;
83      mandatory_.push_back(constraint);
84    }
85  }
86
87  virtual ~WebAudioConstraints() {}
88};
89
90class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
91 public:
92  P2PPortAllocatorFactory(
93      P2PSocketDispatcher* socket_dispatcher,
94      talk_base::NetworkManager* network_manager,
95      talk_base::PacketSocketFactory* socket_factory,
96      WebKit::WebFrame* web_frame)
97      : socket_dispatcher_(socket_dispatcher),
98        network_manager_(network_manager),
99        socket_factory_(socket_factory),
100        web_frame_(web_frame) {
101  }
102
103  virtual cricket::PortAllocator* CreatePortAllocator(
104      const std::vector<StunConfiguration>& stun_servers,
105      const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE {
106    CHECK(web_frame_);
107    P2PPortAllocator::Config config;
108    if (stun_servers.size() > 0) {
109      config.stun_server = stun_servers[0].server.hostname();
110      config.stun_server_port = stun_servers[0].server.port();
111    }
112    config.legacy_relay = false;
113    for (size_t i = 0; i < turn_configurations.size(); ++i) {
114      P2PPortAllocator::Config::RelayServerConfig relay_config;
115      relay_config.server_address = turn_configurations[i].server.hostname();
116      relay_config.port = turn_configurations[i].server.port();
117      relay_config.username = turn_configurations[i].username;
118      relay_config.password = turn_configurations[i].password;
119      relay_config.transport_type = turn_configurations[i].transport_type;
120      relay_config.secure = turn_configurations[i].secure;
121      config.relays.push_back(relay_config);
122    }
123
124    // Use first turn server as the stun server.
125    if (turn_configurations.size() > 0) {
126      config.stun_server = config.relays[0].server_address;
127      config.stun_server_port = config.relays[0].port;
128    }
129
130    return new P2PPortAllocator(
131        web_frame_, socket_dispatcher_.get(), network_manager_,
132        socket_factory_, config);
133  }
134
135 protected:
136  virtual ~P2PPortAllocatorFactory() {}
137
138 private:
139  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
140  // |network_manager_| and |socket_factory_| are a weak references, owned by
141  // MediaStreamDependencyFactory.
142  talk_base::NetworkManager* network_manager_;
143  talk_base::PacketSocketFactory* socket_factory_;
144  // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
145  WebKit::WebFrame* web_frame_;
146};
147
// SourceStateObserver is a helper class used for observing the startup state
// transition of webrtc media sources such as a camera or microphone.
// An instance of the object deletes itself after use.
// Usage:
// 1. Create an instance of the object with the WebKit::WebMediaStream
//    the observed sources belong to, and a callback.
// 2. Add the sources to the observer using AddSource.
// 3. Call StartObservering()
// 4. The callback will be triggered when all sources have transitioned from
//    webrtc::MediaSourceInterface::kInitializing.
class SourceStateObserver : public webrtc::ObserverInterface,
                            public base::NonThreadSafe {
 public:
  SourceStateObserver(
      WebKit::WebMediaStream* web_stream,
      const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
     : web_stream_(web_stream),
       ready_callback_(callback),
       live_(true) {
  }

  // Registers |source| for observation. Only sources still in the
  // kInitializing state are tracked; already-live sources need no waiting and
  // already-ended sources immediately mark the stream as not live.
  void AddSource(webrtc::MediaSourceInterface* source) {
    DCHECK(CalledOnValidThread());
    switch (source->state()) {
      case webrtc::MediaSourceInterface::kInitializing:
        sources_.push_back(source);
        source->RegisterObserver(this);
        break;
      case webrtc::MediaSourceInterface::kLive:
        // The source is already live so we don't need to wait for it.
        break;
      case webrtc::MediaSourceInterface::kEnded:
        // The source has already failed.
        live_ = false;
        break;
      default:
        NOTREACHED();
    }
  }

  // Begins waiting for all registered sources to leave kInitializing. Note:
  // this may run the callback and delete |this| synchronously if no source is
  // still initializing. (The misspelled name is the established interface.)
  void StartObservering() {
    DCHECK(CalledOnValidThread());
    CheckIfSourcesAreLive();
  }

  // webrtc::ObserverInterface implementation; fired whenever an observed
  // source changes state.
  virtual void OnChanged() OVERRIDE {
    DCHECK(CalledOnValidThread());
    CheckIfSourcesAreLive();
  }

 private:
  // Drops every source that has finished initializing, folding its final
  // state into |live_| (all sources must end up kLive for the stream to be
  // reported live). Once no pending sources remain, runs the callback and
  // deletes this object.
  void CheckIfSourcesAreLive() {
    ObservedSources::iterator it = sources_.begin();
    while (it != sources_.end()) {
      if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
        live_ &=  (*it)->state() == webrtc::MediaSourceInterface::kLive;
        (*it)->UnregisterObserver(this);
        it = sources_.erase(it);
      } else {
        ++it;
      }
    }
    if (sources_.empty()) {
      ready_callback_.Run(web_stream_, live_);
      delete this;
    }
  }

  // Not owned; must outlive this observer — TODO confirm against callers.
  WebKit::WebMediaStream* web_stream_;
  MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
  // True while every completed source has reached kLive.
  bool live_;
  typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
      ObservedSources;
  // Sources still in the kInitializing state.
  ObservedSources sources_;
};
223
// Constructs the factory. The libjingle signaling/worker threads and the
// network manager are created lazily by EnsurePeerConnectionFactory(); until
// then the corresponding members stay NULL.
MediaStreamDependencyFactory::MediaStreamDependencyFactory(
    VideoCaptureImplManager* vc_manager,
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
#if defined(GOOGLE_TV)
      decoder_factory_tv_(NULL),
#endif
      vc_manager_(vc_manager),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}
237
// Tears down the PeerConnection factory, the network manager and the worker
// thread via CleanupPeerConnectionFactory().
MediaStreamDependencyFactory::~MediaStreamDependencyFactory() {
  CleanupPeerConnectionFactory();
}
241
242WebKit::WebRTCPeerConnectionHandler*
243MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
244    WebKit::WebRTCPeerConnectionHandlerClient* client) {
245  // Save histogram data so we can see how much PeerConnetion is used.
246  // The histogram counts the number of calls to the JS API
247  // webKitRTCPeerConnection.
248  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
249
250  if (!EnsurePeerConnectionFactory())
251    return NULL;
252
253  return new RTCPeerConnectionHandler(client, this);
254}
255
// Creates the native webrtc sources (video capture sources and local audio
// sources) backing every track of |web_stream|, then waits — via a
// self-deleting SourceStateObserver — for all of them to finish initializing
// before running |sources_created| with the overall live/failed result.
// |render_view_id| identifies the view requesting the sources and is passed
// to the audio capturer; -1 is reserved for WebAudio (see
// MaybeCreateAudioCapturer).
void MediaStreamDependencyFactory::CreateNativeMediaSources(
    int render_view_id,
    const WebKit::WebMediaConstraints& audio_constraints,
    const WebKit::WebMediaConstraints& video_constraints,
    WebKit::WebMediaStream* web_stream,
    const MediaSourcesCreatedCallback& sources_created) {
  DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
  if (!EnsurePeerConnectionFactory()) {
    // Without a factory no source can be created; report failure immediately.
    sources_created.Run(web_stream, false);
    return;
  }

  // |source_observer| cleans up itself when it has completed
  // source_observer->StartObservering.
  SourceStateObserver* source_observer =
      new SourceStateObserver(web_stream, sources_created);

  // Create local video sources.
  RTCMediaConstraints native_video_constraints(video_constraints);
  WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
  web_stream->videoTracks(video_tracks);
  for (size_t i = 0; i < video_tracks.size(); ++i) {
    const WebKit::WebMediaStreamSource& source = video_tracks[i].source();
    MediaStreamSourceExtraData* source_data =
        static_cast<MediaStreamSourceExtraData*>(source.extraData());
    if (!source_data) {
      // TODO(perkj): Implement support for sources from remote MediaStreams.
      NOTIMPLEMENTED();
      continue;
    }
    // Tab and desktop capture are treated as screencast, which affects the
    // video source configuration.
    const bool is_screencast =
        source_data->device_info().device.type ==
            content::MEDIA_TAB_VIDEO_CAPTURE ||
        source_data->device_info().device.type ==
            content::MEDIA_DESKTOP_VIDEO_CAPTURE;
    source_data->SetVideoSource(
        CreateLocalVideoSource(source_data->device_info().session_id,
                               is_screencast,
                               &native_video_constraints).get());
    source_observer->AddSource(source_data->video_source());
  }

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
  web_stream->audioTracks(audio_tracks);
  const CommandLine& command_line = *CommandLine::ForCurrentProcess();
  if (command_line.HasSwitch(switches::kEnableWebRtcAecRecordings)) {
    // Turn on AEC dump recordings for debugging when the switch is present.
    native_audio_constraints.AddOptional(
        RTCMediaConstraints::kInternalAecDump, "true");
  }
  for (size_t i = 0; i < audio_tracks.size(); ++i) {
    const WebKit::WebMediaStreamSource& source = audio_tracks[i].source();
    MediaStreamSourceExtraData* source_data =
        static_cast<MediaStreamSourceExtraData*>(source.extraData());
    if (!source_data) {
      // TODO(henrika): Implement support for sources from remote MediaStreams.
      NOTIMPLEMENTED();
      continue;
    }

    // TODO(xians): Create a new capturer for difference microphones when we
    // support multiple microphones. See issue crbug/262117 .
    const StreamDeviceInfo device_info = source_data->device_info();
    scoped_refptr<WebRtcAudioCapturer> capturer(
        MaybeCreateAudioCapturer(render_view_id, device_info));
    if (!capturer.get()) {
      DLOG(WARNING) << "Failed to create the capturer for device "
                    << device_info.device.id;
      // A failed capturer aborts the whole operation with a failure callback.
      // NOTE(review): |source_observer| is leaked on this path along with any
      // sources already registered — confirm whether that is intentional.
      sources_created.Run(web_stream, false);
      return;
    }

    // Creates a LocalAudioSource object which holds audio options.
    // TODO(xians): The option should apply to the track instead of the source.
    source_data->SetLocalAudioSource(
        CreateLocalAudioSource(&native_audio_constraints).get());
    source_observer->AddSource(source_data->local_audio_source());
  }

  // May run |sources_created| synchronously if every source is already done.
  source_observer->StartObservering();
}
339
340void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
341    WebKit::WebMediaStream* web_stream) {
342  DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
343  if (!EnsurePeerConnectionFactory()) {
344    DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
345    return;
346  }
347
348  std::string label = UTF16ToUTF8(web_stream->id());
349  scoped_refptr<webrtc::MediaStreamInterface> native_stream =
350      CreateLocalMediaStream(label);
351  MediaStreamExtraData* extra_data =
352      new MediaStreamExtraData(native_stream.get(), true);
353  web_stream->setExtraData(extra_data);
354
355  // Add audio tracks.
356  WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks;
357  web_stream->audioTracks(audio_tracks);
358  for (size_t i = 0; i < audio_tracks.size(); ++i) {
359    AddNativeMediaStreamTrack(*web_stream, audio_tracks[i]);
360  }
361
362  // Add video tracks.
363  WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks;
364  web_stream->videoTracks(video_tracks);
365  for (size_t i = 0; i < video_tracks.size(); ++i) {
366    AddNativeMediaStreamTrack(*web_stream, video_tracks[i]);
367  }
368}
369
370void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
371    WebKit::WebMediaStream* web_stream,
372    const MediaStreamExtraData::StreamStopCallback& stream_stop) {
373  CreateNativeLocalMediaStream(web_stream);
374
375  MediaStreamExtraData* extra_data =
376     static_cast<MediaStreamExtraData*>(web_stream->extraData());
377  extra_data->SetLocalStreamStopCallback(stream_stop);
378}
379
// Mirrors a WebKit track into the native webrtc stream attached to |stream|.
// For audio sources without extra data that require an audio consumer, a
// dedicated WebAudio capturer is created on the fly; other sources without
// extra data (remote streams) are not yet supported and return false.
// Returns the result of adding the native track to the native stream.
bool MediaStreamDependencyFactory::AddNativeMediaStreamTrack(
      const WebKit::WebMediaStream& stream,
      const WebKit::WebMediaStreamTrack& track) {
  MediaStreamExtraData* extra_data =
     static_cast<MediaStreamExtraData*>(stream.extraData());
  webrtc::MediaStreamInterface* native_stream = extra_data->stream().get();
  DCHECK(native_stream);

  WebKit::WebMediaStreamSource source = track.source();
  MediaStreamSourceExtraData* source_data =
      static_cast<MediaStreamSourceExtraData*>(source.extraData());

  scoped_refptr<WebRtcAudioCapturer> capturer;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      // CreateWebAudioSource() installs fresh extra data on |source|, so
      // re-fetch |source_data| afterwards.
      capturer = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamSourceExtraData*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return false;
    }
  }

  WebKit::WebMediaStreamSource::Type type = track.source().type();
  DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
         type == WebKit::WebMediaStreamSource::TypeVideo);

  std::string track_id = UTF16ToUTF8(track.id());
  if (source.type() == WebKit::WebMediaStreamSource::TypeAudio) {
    // Fall back to the device's default capturer when none was created above.
    if (!capturer.get() && GetWebRtcAudioDevice())
      capturer = GetWebRtcAudioDevice()->GetDefaultCapturer();

    scoped_refptr<webrtc::AudioTrackInterface> audio_track(
        CreateLocalAudioTrack(track_id,
                              capturer,
                              source_data->local_audio_source()));
    // Keep the native track's enabled state in sync with the WebKit track.
    audio_track->set_enabled(track.isEnabled());
    return native_stream->AddTrack(audio_track.get());
  } else {
    DCHECK(source.type() == WebKit::WebMediaStreamSource::TypeVideo);
    scoped_refptr<webrtc::VideoTrackInterface> video_track(
        CreateLocalVideoTrack(track_id, source_data->video_source()));
    video_track->set_enabled(track.isEnabled());
    return native_stream->AddTrack(video_track.get());
  }
}
431
432bool MediaStreamDependencyFactory::AddNativeVideoMediaTrack(
433    const std::string& track_id,
434    WebKit::WebMediaStream* stream,
435    cricket::VideoCapturer* capturer) {
436  if (!stream) {
437    LOG(ERROR) << "AddNativeVideoMediaTrack called with null WebMediaStream.";
438    return false;
439  }
440
441  // Create native track from the source.
442  scoped_refptr<webrtc::VideoTrackInterface> native_track =
443      CreateLocalVideoTrack(track_id, capturer);
444
445  // Add the native track to native stream
446  MediaStreamExtraData* extra_data =
447      static_cast<MediaStreamExtraData*>(stream->extraData());
448  DCHECK(extra_data);
449  webrtc::MediaStreamInterface* native_stream = extra_data->stream().get();
450  native_stream->AddTrack(native_track.get());
451
452  // Create a new webkit video track.
453  WebKit::WebMediaStreamTrack webkit_track;
454  WebKit::WebMediaStreamSource webkit_source;
455  WebKit::WebString webkit_track_id(UTF8ToUTF16(track_id));
456  WebKit::WebMediaStreamSource::Type type =
457      WebKit::WebMediaStreamSource::TypeVideo;
458  webkit_source.initialize(webkit_track_id, type, webkit_track_id);
459  webkit_track.initialize(webkit_track_id, webkit_source);
460
461  // Add the track to WebMediaStream.
462  stream->addTrack(webkit_track);
463  return true;
464}
465
466bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack(
467    const WebKit::WebMediaStream& stream,
468    const WebKit::WebMediaStreamTrack& track) {
469  MediaStreamExtraData* extra_data =
470      static_cast<MediaStreamExtraData*>(stream.extraData());
471  webrtc::MediaStreamInterface* native_stream = extra_data->stream().get();
472  DCHECK(native_stream);
473
474  WebKit::WebMediaStreamSource::Type type = track.source().type();
475  DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio ||
476         type == WebKit::WebMediaStreamSource::TypeVideo);
477
478  std::string track_id = UTF16ToUTF8(track.id());
479  return type == WebKit::WebMediaStreamSource::TypeAudio ?
480      native_stream->RemoveTrack(native_stream->FindAudioTrack(track_id)) :
481      native_stream->RemoveTrack(native_stream->FindVideoTrack(track_id));
482}
483
// Creates |pc_factory_| (and the audio device it needs) if it does not exist
// yet. Optionally wires in a hardware video decoder factory when the
// command-line switch is present. Returns true when a factory is available.
// Idempotent: a second call with an existing factory is a no-op.
bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
  DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
  if (!pc_factory_.get()) {
    DCHECK(!audio_device_.get());
    audio_device_ = new WebRtcAudioDeviceImpl();

    scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;

    const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
    if (cmd_line->HasSwitch(switches::kEnableWebRtcHWDecoding)) {
      // Hardware decoding requires GPU factories; skip silently when the GPU
      // channel is unavailable.
      scoped_refptr<base::MessageLoopProxy> media_loop_proxy =
          RenderThreadImpl::current()->GetMediaThreadMessageLoopProxy();
      scoped_refptr<RendererGpuVideoDecoderFactories> gpu_factories =
          RenderThreadImpl::current()->GetGpuFactories(media_loop_proxy);
      if (gpu_factories.get() != NULL)
        decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
    }
#if defined(GOOGLE_TV)
    // PeerConnectionFactory will hold the ownership of this
    // VideoDecoderFactory.
    // Note: on GOOGLE_TV this unconditionally replaces any HW decoder
    // factory created above; |decoder_factory_tv_| keeps a raw alias.
    decoder_factory.reset(decoder_factory_tv_ = new RTCVideoDecoderFactoryTv);
#endif

    // The decoder factory is released to the PeerConnection factory, which
    // takes ownership. NULL is passed for the encoder factory.
    scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
        webrtc::CreatePeerConnectionFactory(worker_thread_,
                                            signaling_thread_,
                                            audio_device_.get(),
                                            NULL,
                                            decoder_factory.release()));
    if (factory.get())
      pc_factory_ = factory;
    else
      audio_device_ = NULL;  // Undo the audio device creation on failure.
  }
  return pc_factory_.get() != NULL;
}
520
521bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() {
522  return pc_factory_.get() != NULL;
523}
524
// Creates a native PeerConnection for |web_frame| using |ice_servers| and
// |constraints|. If the optional "googLog" constraint is present, WebRTC
// diagnostic logging is initialized on the IO thread with the constraint's
// value and the frame's URL. The port allocator factory and identity service
// are created per connection; |identity_service| ownership is handed to the
// PeerConnection factory.
scoped_refptr<webrtc::PeerConnectionInterface>
MediaStreamDependencyFactory::CreatePeerConnection(
    const webrtc::PeerConnectionInterface::IceServers& ice_servers,
    const webrtc::MediaConstraintsInterface* constraints,
    WebKit::WebFrame* web_frame,
    webrtc::PeerConnectionObserver* observer) {
  CHECK(web_frame);
  CHECK(observer);

  webrtc::MediaConstraintsInterface::Constraints optional_constraints =
      constraints->GetOptional();
  std::string constraint_value;
  if (optional_constraints.FindFirst(kWebRtcLoggingConstraint,
                                     &constraint_value)) {
    // Kick off diagnostic logging on the IO thread; the constraint value
    // carries the logging configuration.
    std::string url = web_frame->document().url().spec();
    RenderThreadImpl::current()->GetIOMessageLoopProxy()->PostTask(
        FROM_HERE, base::Bind(
            &InitWebRtcLogging,
            constraint_value,
            url));
  }

  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
        new talk_base::RefCountedObject<P2PPortAllocatorFactory>(
            p2p_socket_dispatcher_.get(),
            network_manager_,
            socket_factory_.get(),
            web_frame);

  // Identity service scoped to the origin of the frame's document.
  PeerConnectionIdentityService* identity_service =
      PeerConnectionIdentityService::Create(
          GURL(web_frame->document().url().spec()).GetOrigin());

  return pc_factory_->CreatePeerConnection(ice_servers,
                                           constraints,
                                           pa_factory.get(),
                                           identity_service,
                                           observer).get();
}
564
565scoped_refptr<webrtc::MediaStreamInterface>
566MediaStreamDependencyFactory::CreateLocalMediaStream(
567    const std::string& label) {
568  return pc_factory_->CreateLocalMediaStream(label).get();
569}
570
571scoped_refptr<webrtc::AudioSourceInterface>
572MediaStreamDependencyFactory::CreateLocalAudioSource(
573    const webrtc::MediaConstraintsInterface* constraints) {
574  scoped_refptr<webrtc::AudioSourceInterface> source =
575      pc_factory_->CreateAudioSource(constraints).get();
576  return source;
577}
578
579scoped_refptr<webrtc::VideoSourceInterface>
580MediaStreamDependencyFactory::CreateLocalVideoSource(
581    int video_session_id,
582    bool is_screencast,
583    const webrtc::MediaConstraintsInterface* constraints) {
584  RtcVideoCapturer* capturer = new RtcVideoCapturer(
585      video_session_id, vc_manager_.get(), is_screencast);
586
587  // The video source takes ownership of |capturer|.
588  scoped_refptr<webrtc::VideoSourceInterface> source =
589      pc_factory_->CreateVideoSource(capturer, constraints).get();
590  return source;
591}
592
// Creates a dedicated capturer for a WebAudio-driven source, installs fresh
// MediaStreamSourceExtraData on |source| (including a local audio source
// built with all-disabled audio constraints), and registers the WebAudio
// capturer source as the source's audio consumer. Returns the new capturer.
scoped_refptr<WebRtcAudioCapturer>
MediaStreamDependencyFactory::CreateWebAudioSource(
    WebKit::WebMediaStreamSource* source) {
  DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";
  DCHECK(GetWebRtcAudioDevice());

  // Set up the source and ensure that WebAudio is driving things instead of
  // a microphone. For WebAudio, we always create a new capturer without
  // calling initialize(), WebAudio will re-configure the capturer later on.
  // Pass -1 as the |render_view_id| and an empty device struct to tell the
  // capturer not to start the default source.
  scoped_refptr<WebRtcAudioCapturer> capturer(
      MaybeCreateAudioCapturer(-1, StreamDeviceInfo()));
  DCHECK(capturer.get());

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource(capturer.get()));
  MediaStreamSourceExtraData* source_data =
      new content::MediaStreamSourceExtraData(webaudio_capturer_source.get());

  // Create a LocalAudioSource object which holds audio options.
  // Use audio constraints where all values are false, i.e., disable
  // echo cancellation, automatic gain control, noise suppression and
  // high-pass filter. SetLocalAudioSource() affects core audio parts in
  // third_party/Libjingle.
  WebAudioConstraints webaudio_audio_constraints_all_false;
  source_data->SetLocalAudioSource(
      CreateLocalAudioSource(&webaudio_audio_constraints_all_false).get());
  source->setExtraData(source_data);

  // Replace the default source with WebAudio as source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return capturer;
}
628
629scoped_refptr<webrtc::VideoTrackInterface>
630MediaStreamDependencyFactory::CreateLocalVideoTrack(
631    const std::string& id,
632    webrtc::VideoSourceInterface* source) {
633  return pc_factory_->CreateVideoTrack(id, source).get();
634}
635
636scoped_refptr<webrtc::VideoTrackInterface>
637MediaStreamDependencyFactory::CreateLocalVideoTrack(
638    const std::string& id, cricket::VideoCapturer* capturer) {
639  if (!capturer) {
640    LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
641    return NULL;
642  }
643
644  // Create video source from the |capturer|.
645  scoped_refptr<webrtc::VideoSourceInterface> source =
646      pc_factory_->CreateVideoSource(capturer, NULL).get();
647
648  // Create native track from the source.
649  return pc_factory_->CreateVideoTrack(id, source.get()).get();
650}
651
652scoped_refptr<webrtc::AudioTrackInterface>
653MediaStreamDependencyFactory::CreateLocalAudioTrack(
654    const std::string& id,
655    const scoped_refptr<WebRtcAudioCapturer>& capturer,
656    webrtc::AudioSourceInterface* source) {
657  // TODO(xians): Merge |source| to the capturer(). We can't do this today
658  // because only one capturer() is supported while one |source| is created
659  // for each audio track.
660  scoped_refptr<WebRtcLocalAudioTrack> audio_track(
661      WebRtcLocalAudioTrack::Create(id, capturer, source));
662  // Add the WebRtcAudioDevice as the sink to the local audio track.
663  audio_track->AddSink(GetWebRtcAudioDevice());
664  // Start the audio track. This will hook the |audio_track| to the capturer
665  // as the sink of the audio, and only start the source of the capturer if
666  // it is the first audio track connecting to the capturer.
667  audio_track->Start();
668  return audio_track;
669}
670
671webrtc::SessionDescriptionInterface*
672MediaStreamDependencyFactory::CreateSessionDescription(
673    const std::string& type,
674    const std::string& sdp,
675    webrtc::SdpParseError* error) {
676  return webrtc::CreateSessionDescription(type, sdp, error);
677}
678
679webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate(
680    const std::string& sdp_mid,
681    int sdp_mline_index,
682    const std::string& sdp) {
683  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
684}
685
// Returns the audio device created by CreatePeerConnectionFactory(), or NULL
// if the factory has not been (successfully) created yet.
WebRtcAudioDeviceImpl*
MediaStreamDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}
690
// Runs on |chrome_worker_thread_|: wraps the current message loop in a
// jingle thread wrapper and publishes it through |thread|. |event| is
// signaled last so the caller can safely block until |thread| is set.
void MediaStreamDependencyFactory::InitializeWorkerThread(
    talk_base::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}
699
// Runs on |chrome_worker_thread_|: creates |network_manager_| there, since
// it must live (and later be deleted) on the worker thread. |event| is
// signaled once the manager exists so the caller can stop waiting.
void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}
706
// Runs on |chrome_worker_thread_|: destroys the network manager on the same
// thread it was created on (see CreateIpcNetworkManagerOnWorkerThread).
void MediaStreamDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}
712
// Lazily initializes everything CreatePeerConnectionFactory() needs: the
// signaling thread (wrapped current loop), the libjingle worker thread, the
// IPC network manager (created on the worker thread), the socket factory and
// SSL. Must be called on the thread the factory lives on; returns true when
// the PeerConnection factory is ready.
bool MediaStreamDependencyFactory::EnsurePeerConnectionFactory() {
  DCHECK(CalledOnValidThread());
  if (PeerConnectionFactoryCreated())
    return true;

  if (!signaling_thread_) {
    // The current message loop doubles as the libjingle signaling thread.
    jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
    jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
    signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
    CHECK(signaling_thread_);
  }

  if (!worker_thread_) {
    if (!chrome_worker_thread_.IsRunning()) {
      if (!chrome_worker_thread_.Start()) {
        LOG(ERROR) << "Could not start worker thread";
        signaling_thread_ = NULL;
        return false;
      }
    }
    // Block until the worker thread has wrapped itself and published the
    // talk_base::Thread pointer.
    base::WaitableEvent event(true, false);
    chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &MediaStreamDependencyFactory::InitializeWorkerThread,
        base::Unretained(this),
        &worker_thread_,
        &event));
    event.Wait();
    DCHECK(worker_thread_);
  }

  if (!network_manager_) {
    // The network manager must be created on the worker thread; wait for it.
    base::WaitableEvent event(true, false);
    chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
        &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
        base::Unretained(this),
        &event));
    event.Wait();
  }

  if (!socket_factory_) {
    socket_factory_.reset(
        new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));
  }

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!talk_base::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    return false;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  if (!CreatePeerConnectionFactory()) {
    LOG(ERROR) << "Could not create PeerConnection factory";
    return false;
  }
  return true;
}
774
// Releases the PeerConnection factory and, if a network manager exists,
// deletes it on the worker thread (its creation thread) before stopping that
// thread. Called from the destructor.
void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread they were
    // created, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &MediaStreamDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish
      // before letting the function continue to avoid any potential race
      // issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}
793
// Returns an initialized audio capturer for |render_view_id|/|device_info|.
// For a normal getUserMedia request (|render_view_id| != -1) the device's
// existing default capturer is reused when present; otherwise (including the
// WebAudio case, which passes -1) a fresh capturer is created and, after
// successful initialization, registered with the WebRtcAudioDeviceImpl.
// Returns NULL when Initialize() fails.
scoped_refptr<WebRtcAudioCapturer>
MediaStreamDependencyFactory::MaybeCreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info) {
  scoped_refptr<WebRtcAudioCapturer> capturer;
  if (render_view_id != -1) {
    // From a normal getUserMedia, re-use the existing default capturer.
    capturer = GetWebRtcAudioDevice()->GetDefaultCapturer();
  }
  // If the default capturer does not exist or |render_view_id| == -1, create
  // a new capturer.
  bool is_new_capturer = false;
  if (!capturer.get()) {
    capturer = WebRtcAudioCapturer::CreateCapturer();
    is_new_capturer = true;
  }

  if (!capturer->Initialize(
          render_view_id,
          static_cast<media::ChannelLayout>(device_info.device.channel_layout),
          device_info.device.sample_rate, device_info.session_id,
          device_info.device.id)) {
    return NULL;
  }

  // Add the capturer to the WebRtcAudioDeviceImpl if it is a new capturer.
  if (is_new_capturer)
    GetWebRtcAudioDevice()->AddAudioCapturer(capturer);

  return capturer;
}
825
826}  // namespace content
827