1// Copyright 2014 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
6
7#include <vector>
8
9#include "base/command_line.h"
10#include "base/strings/utf_string_conversions.h"
11#include "base/synchronization/waitable_event.h"
12#include "content/common/media/media_stream_messages.h"
13#include "content/public/common/content_switches.h"
14#include "content/renderer/media/media_stream.h"
15#include "content/renderer/media/media_stream_audio_processor.h"
16#include "content/renderer/media/media_stream_audio_processor_options.h"
17#include "content/renderer/media/media_stream_audio_source.h"
18#include "content/renderer/media/media_stream_video_source.h"
19#include "content/renderer/media/media_stream_video_track.h"
20#include "content/renderer/media/peer_connection_identity_service.h"
21#include "content/renderer/media/rtc_media_constraints.h"
22#include "content/renderer/media/rtc_peer_connection_handler.h"
23#include "content/renderer/media/rtc_video_decoder_factory.h"
24#include "content/renderer/media/rtc_video_encoder_factory.h"
25#include "content/renderer/media/webaudio_capturer_source.h"
26#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
27#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
28#include "content/renderer/media/webrtc_audio_device_impl.h"
29#include "content/renderer/media/webrtc_local_audio_track.h"
30#include "content/renderer/media/webrtc_uma_histograms.h"
31#include "content/renderer/p2p/ipc_network_manager.h"
32#include "content/renderer/p2p/ipc_socket_factory.h"
33#include "content/renderer/p2p/port_allocator.h"
34#include "content/renderer/render_thread_impl.h"
35#include "jingle/glue/thread_wrapper.h"
36#include "media/filters/gpu_video_accelerator_factories.h"
37#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
38#include "third_party/WebKit/public/platform/WebMediaStream.h"
39#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
40#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
41#include "third_party/WebKit/public/platform/WebURL.h"
42#include "third_party/WebKit/public/web/WebDocument.h"
43#include "third_party/WebKit/public/web/WebFrame.h"
44#include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"
45
46#if defined(USE_OPENSSL)
47#include "third_party/libjingle/source/talk/base/ssladapter.h"
48#else
49#include "net/socket/nss_ssl_util.h"
50#endif
51
52#if defined(OS_ANDROID)
53#include "media/base/android/media_codec_bridge.h"
54#endif
55
56namespace content {
57
// Map of corresponding media constraints and platform effects.
// Each entry pairs a media-stream constraint name with the hardware platform
// effect it corresponds to; consumed by HarmonizeConstraintsAndEffects()
// below to reconcile software constraints with platform audio effects.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { content::kMediaStreamAudioDucking,
    media::AudioParameters::DUCKING },
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER },
};
68
69// If any platform effects are available, check them against the constraints.
70// Disable effects to match false constraints, but if a constraint is true, set
71// the constraint to false to later disable the software effect.
72//
73// This function may modify both |constraints| and |effects|.
74void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints,
75                                    int* effects) {
76  if (*effects != media::AudioParameters::NO_EFFECTS) {
77    for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
78      bool value;
79      size_t is_mandatory = 0;
80      if (!webrtc::FindConstraint(constraints,
81                                  kConstraintEffectMap[i].constraint,
82                                  &value,
83                                  &is_mandatory) || !value) {
84        // If the constraint is false, or does not exist, disable the platform
85        // effect.
86        *effects &= ~kConstraintEffectMap[i].effect;
87        DVLOG(1) << "Disabling platform effect: "
88                 << kConstraintEffectMap[i].effect;
89      } else if (*effects & kConstraintEffectMap[i].effect) {
90        // If the constraint is true, leave the platform effect enabled, and
91        // set the constraint to false to later disable the software effect.
92        if (is_mandatory) {
93          constraints->AddMandatory(kConstraintEffectMap[i].constraint,
94              webrtc::MediaConstraintsInterface::kValueFalse, true);
95        } else {
96          constraints->AddOptional(kConstraintEffectMap[i].constraint,
97              webrtc::MediaConstraintsInterface::kValueFalse, true);
98        }
99        DVLOG(1) << "Disabling constraint: "
100                 << kConstraintEffectMap[i].constraint;
101      }
102    }
103  }
104}
105
106class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
107 public:
108  P2PPortAllocatorFactory(
109      P2PSocketDispatcher* socket_dispatcher,
110      talk_base::NetworkManager* network_manager,
111      talk_base::PacketSocketFactory* socket_factory,
112      blink::WebFrame* web_frame)
113      : socket_dispatcher_(socket_dispatcher),
114        network_manager_(network_manager),
115        socket_factory_(socket_factory),
116        web_frame_(web_frame) {
117  }
118
119  virtual cricket::PortAllocator* CreatePortAllocator(
120      const std::vector<StunConfiguration>& stun_servers,
121      const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE {
122    CHECK(web_frame_);
123    P2PPortAllocator::Config config;
124    if (stun_servers.size() > 0) {
125      config.stun_server = stun_servers[0].server.hostname();
126      config.stun_server_port = stun_servers[0].server.port();
127    }
128    config.legacy_relay = false;
129    for (size_t i = 0; i < turn_configurations.size(); ++i) {
130      P2PPortAllocator::Config::RelayServerConfig relay_config;
131      relay_config.server_address = turn_configurations[i].server.hostname();
132      relay_config.port = turn_configurations[i].server.port();
133      relay_config.username = turn_configurations[i].username;
134      relay_config.password = turn_configurations[i].password;
135      relay_config.transport_type = turn_configurations[i].transport_type;
136      relay_config.secure = turn_configurations[i].secure;
137      config.relays.push_back(relay_config);
138    }
139
140    // Use first turn server as the stun server.
141    if (turn_configurations.size() > 0) {
142      config.stun_server = config.relays[0].server_address;
143      config.stun_server_port = config.relays[0].port;
144    }
145
146    return new P2PPortAllocator(
147        web_frame_, socket_dispatcher_.get(), network_manager_,
148        socket_factory_, config);
149  }
150
151 protected:
152  virtual ~P2PPortAllocatorFactory() {}
153
154 private:
155  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
156  // |network_manager_| and |socket_factory_| are a weak references, owned by
157  // PeerConnectionDependencyFactory.
158  talk_base::NetworkManager* network_manager_;
159  talk_base::PacketSocketFactory* socket_factory_;
160  // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
161  blink::WebFrame* web_frame_;
162};
163
// Constructs the factory in an uninitialized state.  The heavyweight pieces
// (worker thread, network manager, libjingle peer connection factory) are
// created lazily by CreatePeerConnectionFactory(), invoked via GetPcFactory()
// on first use.
PeerConnectionDependencyFactory::PeerConnectionDependencyFactory(
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}
172
PeerConnectionDependencyFactory::~PeerConnectionDependencyFactory() {
  // Release the factory and tear down the worker-thread-owned network
  // manager before the members are destroyed.
  CleanupPeerConnectionFactory();
  // Unregister from AEC dump IPC notifications if we ever registered
  // (see CreatePeerConnectionFactory()).
  if (aec_dump_message_filter_)
    aec_dump_message_filter_->RemoveDelegate(this);
}
178
179blink::WebRTCPeerConnectionHandler*
180PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
181    blink::WebRTCPeerConnectionHandlerClient* client) {
182  // Save histogram data so we can see how much PeerConnetion is used.
183  // The histogram counts the number of calls to the JS API
184  // webKitRTCPeerConnection.
185  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
186
187  return new RTCPeerConnectionHandler(client, this);
188}
189
190bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
191    int render_view_id,
192    const blink::WebMediaConstraints& audio_constraints,
193    MediaStreamAudioSource* source_data) {
194  DVLOG(1) << "InitializeMediaStreamAudioSources()";
195
196  // Do additional source initialization if the audio source is a valid
197  // microphone or tab audio.
198  RTCMediaConstraints native_audio_constraints(audio_constraints);
199  MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);
200
201  StreamDeviceInfo device_info = source_data->device_info();
202  RTCMediaConstraints constraints = native_audio_constraints;
203  // May modify both |constraints| and |effects|.
204  HarmonizeConstraintsAndEffects(&constraints,
205                                 &device_info.device.input.effects);
206
207  scoped_refptr<WebRtcAudioCapturer> capturer(
208      CreateAudioCapturer(render_view_id, device_info, audio_constraints,
209                          source_data));
210  if (!capturer.get()) {
211    DLOG(WARNING) << "Failed to create the capturer for device "
212        << device_info.device.id;
213    // TODO(xians): Don't we need to check if source_observer is observing
214    // something? If not, then it looks like we have a leak here.
215    // OTOH, if it _is_ observing something, then the callback might
216    // be called multiple times which is likely also a bug.
217    return false;
218  }
219  source_data->SetAudioCapturer(capturer);
220
221  // Creates a LocalAudioSource object which holds audio options.
222  // TODO(xians): The option should apply to the track instead of the source.
223  // TODO(perkj): Move audio constraints parsing to Chrome.
224  // Currently there are a few constraints that are parsed by libjingle and
225  // the state is set to ended if parsing fails.
226  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
227      CreateLocalAudioSource(&constraints).get());
228  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
229    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
230    return false;
231  }
232  source_data->SetLocalAudioSource(rtc_source);
233  return true;
234}
235
236WebRtcVideoCapturerAdapter*
237PeerConnectionDependencyFactory::CreateVideoCapturer(
238    bool is_screeencast) {
239  // We need to make sure the libjingle thread wrappers have been created
240  // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
241  // since the base class of WebRtcVideoCapturerAdapter is a
242  // cricket::VideoCapturer and it uses the libjingle thread wrappers.
243  if (!GetPcFactory())
244    return NULL;
245  return new WebRtcVideoCapturerAdapter(is_screeencast);
246}
247
248scoped_refptr<webrtc::VideoSourceInterface>
249PeerConnectionDependencyFactory::CreateVideoSource(
250    cricket::VideoCapturer* capturer,
251    const blink::WebMediaConstraints& constraints) {
252  RTCMediaConstraints webrtc_constraints(constraints);
253  scoped_refptr<webrtc::VideoSourceInterface> source =
254      GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get();
255  return source;
256}
257
258const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
259PeerConnectionDependencyFactory::GetPcFactory() {
260  if (!pc_factory_)
261    CreatePeerConnectionFactory();
262  CHECK(pc_factory_);
263  return pc_factory_;
264}
265
// One-time initialization of all libjingle dependencies: the signaling and
// worker threads, the IPC-based network manager and socket factory, SSL,
// the (optional) hardware video codec factories, the audio device, and
// finally the peer connection factory itself.  Called from GetPcFactory()
// on first use; the DCHECKs below guard against a second invocation.
void PeerConnectionDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!signaling_thread_);
  DCHECK(!worker_thread_);
  DCHECK(!network_manager_);
  DCHECK(!socket_factory_);
  DCHECK(!chrome_worker_thread_.IsRunning());

  DVLOG(1) << "PeerConnectionDependencyFactory::CreatePeerConnectionFactory()";

  // The current thread is wrapped for libjingle and doubles as the
  // signaling thread.
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
  CHECK(signaling_thread_);

  CHECK(chrome_worker_thread_.Start());

  // Block until the worker thread has wrapped itself for libjingle and
  // published its talk_base::Thread* into |worker_thread_|.
  base::WaitableEvent start_worker_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeWorkerThread,
      base::Unretained(this),
      &worker_thread_,
      &start_worker_event));
  start_worker_event.Wait();
  CHECK(worker_thread_);

  // The network manager must be created (and later deleted) on the worker
  // thread; wait until that has happened.
  base::WaitableEvent create_network_manager_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
      base::Unretained(this),
      &create_network_manager_event));
  create_network_manager_event.Wait();

  socket_factory_.reset(
      new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!talk_base::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    NOTREACHED();
    return;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  // Hardware codec factories are only built when GPU factories are
  // available and the corresponding disable switch is absent.
  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories =
      RenderThreadImpl::current()->GetGpuFactories();
  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
    if (gpu_factories)
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
  }

  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding)) {
    if (gpu_factories)
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  // Drop the HW encoder when MediaCodec cannot adjust parameters on the fly.
  if (!media::MediaCodecBridge::SupportsSetParameters())
    encoder_factory.reset();
#endif

  EnsureWebRtcAudioDeviceImpl();

  // The codec factories are released to the peer connection factory, which
  // takes ownership of them.
  scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread_,
                                          signaling_thread_,
                                          audio_device_.get(),
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  CHECK(factory);

  pc_factory_ = factory;
  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_sctp_data_channels = false;
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);

  // TODO(xians): Remove the following code after kDisableAudioTrackProcessing
  // is removed.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()) {
    aec_dump_message_filter_ = AecDumpMessageFilter::Get();
    // In unit tests not creating a message filter, |aec_dump_message_filter_|
    // will be NULL. We can just ignore that. Other unit tests and browser tests
    // ensure that we do get the filter when we should.
    if (aec_dump_message_filter_)
      aec_dump_message_filter_->AddDelegate(this);
  }
}
363
// Returns true once CreatePeerConnectionFactory() has successfully created
// |pc_factory_|.
bool PeerConnectionDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}
367
368scoped_refptr<webrtc::PeerConnectionInterface>
369PeerConnectionDependencyFactory::CreatePeerConnection(
370    const webrtc::PeerConnectionInterface::IceServers& ice_servers,
371    const webrtc::MediaConstraintsInterface* constraints,
372    blink::WebFrame* web_frame,
373    webrtc::PeerConnectionObserver* observer) {
374  CHECK(web_frame);
375  CHECK(observer);
376  if (!GetPcFactory())
377    return NULL;
378
379  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
380        new talk_base::RefCountedObject<P2PPortAllocatorFactory>(
381            p2p_socket_dispatcher_.get(),
382            network_manager_,
383            socket_factory_.get(),
384            web_frame);
385
386  PeerConnectionIdentityService* identity_service =
387      new PeerConnectionIdentityService(
388          GURL(web_frame->document().url().spec()).GetOrigin());
389
390  return GetPcFactory()->CreatePeerConnection(ice_servers,
391                                            constraints,
392                                            pa_factory.get(),
393                                            identity_service,
394                                            observer).get();
395}
396
397scoped_refptr<webrtc::MediaStreamInterface>
398PeerConnectionDependencyFactory::CreateLocalMediaStream(
399    const std::string& label) {
400  return GetPcFactory()->CreateLocalMediaStream(label).get();
401}
402
403scoped_refptr<webrtc::AudioSourceInterface>
404PeerConnectionDependencyFactory::CreateLocalAudioSource(
405    const webrtc::MediaConstraintsInterface* constraints) {
406  scoped_refptr<webrtc::AudioSourceInterface> source =
407      GetPcFactory()->CreateAudioSource(constraints).get();
408  return source;
409}
410
411void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
412    const blink::WebMediaStreamTrack& track) {
413  blink::WebMediaStreamSource source = track.source();
414  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
415  MediaStreamAudioSource* source_data =
416      static_cast<MediaStreamAudioSource*>(source.extraData());
417
418  scoped_refptr<WebAudioCapturerSource> webaudio_source;
419  if (!source_data) {
420    if (source.requiresAudioConsumer()) {
421      // We're adding a WebAudio MediaStream.
422      // Create a specific capturer for each WebAudio consumer.
423      webaudio_source = CreateWebAudioSource(&source);
424      source_data =
425          static_cast<MediaStreamAudioSource*>(source.extraData());
426    } else {
427      // TODO(perkj): Implement support for sources from
428      // remote MediaStreams.
429      NOTIMPLEMENTED();
430      return;
431    }
432  }
433
434  // Creates an adapter to hold all the libjingle objects.
435  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
436      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
437                                           source_data->local_audio_source()));
438  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
439      track.isEnabled());
440
441  // TODO(xians): Merge |source| to the capturer(). We can't do this today
442  // because only one capturer() is supported while one |source| is created
443  // for each audio track.
444  scoped_ptr<WebRtcLocalAudioTrack> audio_track(
445      new WebRtcLocalAudioTrack(adapter,
446                                source_data->GetAudioCapturer(),
447                                webaudio_source));
448
449  StartLocalAudioTrack(audio_track.get());
450
451  // Pass the ownership of the native local audio track to the blink track.
452  blink::WebMediaStreamTrack writable_track = track;
453  writable_track.setExtraData(audio_track.release());
454}
455
// Connects |audio_track| to its capturer and starts it.
void PeerConnectionDependencyFactory::StartLocalAudioTrack(
    WebRtcLocalAudioTrack* audio_track) {
  // Add the WebRtcAudioDevice as the sink to the local audio track.
  // TODO(xians): Remove the following line of code after the APM in WebRTC is
  // completely deprecated. See http://crbug/365672.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled())
    audio_track->AddSink(GetWebRtcAudioDevice());

  // Start the audio track. This will hook the |audio_track| to the capturer
  // as the sink of the audio, and only start the source of the capturer if
  // it is the first audio track connecting to the capturer.
  audio_track->Start();
}
469
470scoped_refptr<WebAudioCapturerSource>
471PeerConnectionDependencyFactory::CreateWebAudioSource(
472    blink::WebMediaStreamSource* source) {
473  DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";
474
475  scoped_refptr<WebAudioCapturerSource>
476      webaudio_capturer_source(new WebAudioCapturerSource());
477  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
478
479  // Use the current default capturer for the WebAudio track so that the
480  // WebAudio track can pass a valid delay value and |need_audio_processing|
481  // flag to PeerConnection.
482  // TODO(xians): Remove this after moving APM to Chrome.
483  if (GetWebRtcAudioDevice()) {
484    source_data->SetAudioCapturer(
485        GetWebRtcAudioDevice()->GetDefaultCapturer());
486  }
487
488  // Create a LocalAudioSource object which holds audio options.
489  // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
490  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
491  source->setExtraData(source_data);
492
493  // Replace the default source with WebAudio as source instead.
494  source->addAudioConsumer(webaudio_capturer_source.get());
495
496  return webaudio_capturer_source;
497}
498
499scoped_refptr<webrtc::VideoTrackInterface>
500PeerConnectionDependencyFactory::CreateLocalVideoTrack(
501    const std::string& id,
502    webrtc::VideoSourceInterface* source) {
503  return GetPcFactory()->CreateVideoTrack(id, source).get();
504}
505
506scoped_refptr<webrtc::VideoTrackInterface>
507PeerConnectionDependencyFactory::CreateLocalVideoTrack(
508    const std::string& id, cricket::VideoCapturer* capturer) {
509  if (!capturer) {
510    LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
511    return NULL;
512  }
513
514  // Create video source from the |capturer|.
515  scoped_refptr<webrtc::VideoSourceInterface> source =
516      GetPcFactory()->CreateVideoSource(capturer, NULL).get();
517
518  // Create native track from the source.
519  return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
520}
521
522webrtc::SessionDescriptionInterface*
523PeerConnectionDependencyFactory::CreateSessionDescription(
524    const std::string& type,
525    const std::string& sdp,
526    webrtc::SdpParseError* error) {
527  return webrtc::CreateSessionDescription(type, sdp, error);
528}
529
530webrtc::IceCandidateInterface*
531PeerConnectionDependencyFactory::CreateIceCandidate(
532    const std::string& sdp_mid,
533    int sdp_mline_index,
534    const std::string& sdp) {
535  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
536}
537
// Returns the shared audio device, or NULL if EnsureWebRtcAudioDeviceImpl()
// has not run yet.
WebRtcAudioDeviceImpl*
PeerConnectionDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}
542
// Runs on |chrome_worker_thread_|.  Wraps the thread for libjingle use,
// publishes the wrapper through |thread| and signals |event| so the caller
// (CreatePeerConnectionFactory()) can resume.
void PeerConnectionDependencyFactory::InitializeWorkerThread(
    talk_base::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}
551
// Runs on |chrome_worker_thread_|.  Creates the IPC-backed network manager
// and signals |event| once done.  The manager is later deleted on the same
// thread by DeleteIpcNetworkManager().
void PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}
558
// Runs on |chrome_worker_thread_|; the network manager must be deleted on
// the thread that created it.
void PeerConnectionDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}
564
// Releases the peer connection factory and, if a network manager was
// created, deletes it on the worker thread before stopping that thread.
void PeerConnectionDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread they were
    // created, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &PeerConnectionDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish before
      // letting the function continue to avoid any potential race issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}
583
584scoped_refptr<WebRtcAudioCapturer>
585PeerConnectionDependencyFactory::CreateAudioCapturer(
586    int render_view_id,
587    const StreamDeviceInfo& device_info,
588    const blink::WebMediaConstraints& constraints,
589    MediaStreamAudioSource* audio_source) {
590  // TODO(xians): Handle the cases when gUM is called without a proper render
591  // view, for example, by an extension.
592  DCHECK_GE(render_view_id, 0);
593
594  EnsureWebRtcAudioDeviceImpl();
595  DCHECK(GetWebRtcAudioDevice());
596  return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
597                                             constraints,
598                                             GetWebRtcAudioDevice(),
599                                             audio_source);
600}
601
602void PeerConnectionDependencyFactory::AddNativeAudioTrackToBlinkTrack(
603    webrtc::MediaStreamTrackInterface* native_track,
604    const blink::WebMediaStreamTrack& webkit_track,
605    bool is_local_track) {
606  DCHECK(!webkit_track.isNull() && !webkit_track.extraData());
607  DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio,
608            webkit_track.source().type());
609  blink::WebMediaStreamTrack track = webkit_track;
610
611  DVLOG(1) << "AddNativeTrackToBlinkTrack() audio";
612  track.setExtraData(
613      new MediaStreamTrack(
614          static_cast<webrtc::AudioTrackInterface*>(native_track),
615          is_local_track));
616}
617
// Returns the message loop proxy of the libjingle worker thread.
scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_worker_thread_.message_loop_proxy();
}
623
// IPC handler: starts dumping AEC data into the file received from the
// browser process.  Only used when audio track processing is disabled.
void PeerConnectionDependencyFactory::OnAecDumpFile(
    const IPC::PlatformFileForTransit& file_handle) {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  DCHECK(PeerConnectionFactoryCreated());

  base::File file = IPC::PlatformFileForTransitToFile(file_handle);
  DCHECK(file.IsValid());

  // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
  // fails, |aec_dump_file| will be closed.
  if (!GetPcFactory()->StartAecDump(file.TakePlatformFile()))
    VLOG(1) << "Could not start AEC dump.";
}
638
// IPC handler for the "disable AEC dump" notification.
void PeerConnectionDependencyFactory::OnDisableAecDump() {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  // Do nothing. We never disable AEC dump for non-track-processing case.
}
644
// The IPC channel is going away; drop our reference to the message filter.
void PeerConnectionDependencyFactory::OnIpcClosing() {
  DCHECK(CalledOnValidThread());
  aec_dump_message_filter_ = NULL;
}
649
650void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
651  if (audio_device_)
652    return;
653
654  audio_device_ = new WebRtcAudioDeviceImpl();
655}
656
657}  // namespace content
658