1/*
2 * libjingle
3 * Copyright 2012, Google Inc.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions are met:
7 *
8 *  1. Redistributions of source code must retain the above copyright notice,
9 *     this list of conditions and the following disclaimer.
10 *  2. Redistributions in binary form must reproduce the above copyright notice,
11 *     this list of conditions and the following disclaimer in the documentation
12 *     and/or other materials provided with the distribution.
13 *  3. The name of the author may not be used to endorse or promote products
14 *     derived from this software without specific prior written permission.
15 *
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
17 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
19 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
20 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
22 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
23 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
24 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
25 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27
28#include "talk/app/webrtc/mediastreamsignaling.h"
29
30#include <vector>
31
32#include "talk/app/webrtc/audiotrack.h"
33#include "talk/app/webrtc/mediastreamproxy.h"
34#include "talk/app/webrtc/mediaconstraintsinterface.h"
35#include "talk/app/webrtc/mediastreamtrackproxy.h"
36#include "talk/app/webrtc/videotrack.h"
37#include "talk/base/bytebuffer.h"
38
// Label and track ids for the default remote MediaStream created in
// MaybeCreateDefaultStream() when the remote description offers media
// without signaling any stream/track information.
static const char kDefaultStreamLabel[] = "default";
static const char kDefaultAudioTrackLabel[] = "defaulta0";
static const char kDefaultVideoTrackLabel[] = "defaultv0";
42
43namespace webrtc {
44
45using talk_base::scoped_ptr;
46using talk_base::scoped_refptr;
47
// Supported MediaConstraints.
// Definitions for the constraint keys declared in MediaConstraintsInterface.
// The string values are the keys applications pass in their constraints;
// they are consumed by ParseConstraints() below.
const char MediaConstraintsInterface::kOfferToReceiveAudio[] =
    "OfferToReceiveAudio";
const char MediaConstraintsInterface::kOfferToReceiveVideo[] =
    "OfferToReceiveVideo";
const char MediaConstraintsInterface::kIceRestart[] =
    "IceRestart";
const char MediaConstraintsInterface::kUseRtpMux[] =
    "googUseRtpMUX";
const char MediaConstraintsInterface::kVoiceActivityDetection[] =
    "VoiceActivityDetection";
59
60static bool ParseConstraints(
61    const MediaConstraintsInterface* constraints,
62    cricket::MediaSessionOptions* options, bool is_answer) {
63  bool value;
64  size_t mandatory_constraints_satisfied = 0;
65
66  if (FindConstraint(constraints,
67                     MediaConstraintsInterface::kOfferToReceiveAudio,
68                     &value, &mandatory_constraints_satisfied)) {
69    // |options-|has_audio| can only change from false to
70    // true, but never change from true to false. This is to make sure
71    // CreateOffer / CreateAnswer doesn't remove a media content
72    // description that has been created.
73    options->has_audio |= value;
74  } else {
75    // kOfferToReceiveAudio defaults to true according to spec.
76    options->has_audio = true;
77  }
78
79  if (FindConstraint(constraints,
80                     MediaConstraintsInterface::kOfferToReceiveVideo,
81                     &value, &mandatory_constraints_satisfied)) {
82    // |options->has_video| can only change from false to
83    // true, but never change from true to false. This is to make sure
84    // CreateOffer / CreateAnswer doesn't remove a media content
85    // description that has been created.
86    options->has_video |= value;
87  } else {
88    // kOfferToReceiveVideo defaults to false according to spec. But
89    // if it is an answer and video is offered, we should still accept video
90    // per default.
91    options->has_video |= is_answer;
92  }
93
94  if (FindConstraint(constraints,
95                     MediaConstraintsInterface::kVoiceActivityDetection,
96                     &value, &mandatory_constraints_satisfied)) {
97    options->vad_enabled = value;
98  }
99
100  if (FindConstraint(constraints,
101                     MediaConstraintsInterface::kUseRtpMux,
102                     &value, &mandatory_constraints_satisfied)) {
103    options->bundle_enabled = value;
104  } else {
105    // kUseRtpMux defaults to true according to spec.
106    options->bundle_enabled = true;
107  }
108  if (FindConstraint(constraints,
109                     MediaConstraintsInterface::kIceRestart,
110                     &value, &mandatory_constraints_satisfied)) {
111    options->transport_options.ice_restart = value;
112  } else {
113    // kIceRestart defaults to false according to spec.
114    options->transport_options.ice_restart = false;
115  }
116
117  if (!constraints) {
118    return true;
119  }
120  return mandatory_constraints_satisfied == constraints->GetMandatory().size();
121}
122
123// Returns true if if at least one media content is present and
124// |options.bundle_enabled| is true.
125// Bundle will be enabled  by default if at least one media content is present
126// and the constraint kUseRtpMux has not disabled bundle.
127static bool EvaluateNeedForBundle(const cricket::MediaSessionOptions& options) {
128  return options.bundle_enabled &&
129      (options.has_audio || options.has_video || options.has_data());
130}
131
// Factory class for creating remote MediaStreams and MediaStreamTracks.
// All created objects are wrapped in proxies that marshal calls to
// |signaling_thread_|.
class RemoteMediaStreamFactory {
 public:
  explicit RemoteMediaStreamFactory(talk_base::Thread* signaling_thread)
      : signaling_thread_(signaling_thread) {
  }

  // Creates a new MediaStream with |stream_label| behind a proxy bound to
  // the signaling thread.
  talk_base::scoped_refptr<MediaStreamInterface> CreateMediaStream(
      const std::string& stream_label) {
    return MediaStreamProxy::Create(
        signaling_thread_, MediaStream::Create(stream_label));
  }

  // Creates a live audio track with |track_id| and adds it to |stream|.
  // Returns the track, or NULL if it could not be added.
  AudioTrackInterface* AddAudioTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<AudioTrackInterface, AudioTrack, AudioTrackProxy>(stream,
                                                                      track_id);
  }

  // Creates a live video track with |track_id| and adds it to |stream|.
  // Returns the track, or NULL if it could not be added.
  VideoTrackInterface* AddVideoTrack(webrtc::MediaStreamInterface* stream,
                                     const std::string& track_id) {
    return AddTrack<VideoTrackInterface, VideoTrack, VideoTrackProxy>(stream,
                                                                      track_id);
  }

 private:
  // Creates a track of concrete type T (interface TI) behind proxy TP,
  // marks it live, and adds it to |stream|. Returns NULL on failure.
  template <typename TI, typename T, typename TP>
  TI* AddTrack(MediaStreamInterface* stream, const std::string& track_id) {
    talk_base::scoped_refptr<TI> track(
        TP::Create(signaling_thread_, T::Create(track_id, NULL)));
    track->set_state(webrtc::MediaStreamTrackInterface::kLive);
    if (stream->AddTrack(track)) {
      return track;
    }
    return NULL;
  }

  talk_base::Thread* signaling_thread_;
};
171
MediaStreamSignaling::MediaStreamSignaling(
    talk_base::Thread* signaling_thread,
    MediaStreamSignalingObserver* stream_observer)
    : signaling_thread_(signaling_thread),
      data_channel_factory_(NULL),
      stream_observer_(stream_observer),
      local_streams_(StreamCollection::Create()),
      remote_streams_(StreamCollection::Create()),
      remote_stream_factory_(new RemoteMediaStreamFactory(signaling_thread)),
      last_allocated_sctp_id_(0) {
  // Start with no media; these flags are latched to true later by
  // UpdateSessionOptions() / ParseConstraints() and never reset.
  options_.has_video = false;
  options_.has_audio = false;
}
185
MediaStreamSignaling::~MediaStreamSignaling() {
  // Intentionally empty.
}
188
// Shuts down all media: rejects (ends) all remote audio and video tracks
// and signals engine close to every data channel.
void MediaStreamSignaling::TearDown() {
  OnAudioChannelClose();
  OnVideoChannelClose();
  OnDataChannelClose();
}
194
195bool MediaStreamSignaling::IsSctpIdAvailable(int id) const {
196  if (id < 0 || id > static_cast<int>(cricket::kMaxSctpSid))
197    return false;
198  for (DataChannels::const_iterator iter = data_channels_.begin();
199       iter != data_channels_.end();
200       ++iter) {
201    if (iter->second->id() == id) {
202      return false;
203    }
204  }
205  return true;
206}
207
// Gets the first id that has not been taken by existing data
// channels. Starting from 1.
// Returns false if no id can be allocated.
// NOTE(review): on failure |last_allocated_sctp_id_| is left at kMaxSctpSid,
// so later calls keep failing even if a previously used id has been freed —
// ids are never reused. Presumably acceptable until the TODO below is
// resolved; verify against the data channel establishment rules.
// TODO(jiayl): Update to some kind of even/odd random number selection when the
// rules are fully standardized.
bool MediaStreamSignaling::AllocateSctpId(int* id) {
  do {
    last_allocated_sctp_id_++;
  } while (last_allocated_sctp_id_ <= static_cast<int>(cricket::kMaxSctpSid) &&
           !IsSctpIdAvailable(last_allocated_sctp_id_));

  if (last_allocated_sctp_id_ > static_cast<int>(cricket::kMaxSctpSid)) {
    // Clamp so the counter does not grow without bound on repeated failures.
    last_allocated_sctp_id_ = cricket::kMaxSctpSid;
    return false;
  }

  *id = last_allocated_sctp_id_;
  return true;
}
227
228bool MediaStreamSignaling::AddDataChannel(DataChannel* data_channel) {
229  ASSERT(data_channel != NULL);
230  if (data_channels_.find(data_channel->label()) != data_channels_.end()) {
231    LOG(LS_ERROR) << "DataChannel with label " << data_channel->label()
232                  << " already exists.";
233    return false;
234  }
235  data_channels_[data_channel->label()] = data_channel;
236  return true;
237}
238
239bool MediaStreamSignaling::AddDataChannelFromOpenMessage(
240    const std::string& label,
241    const DataChannelInit& config) {
242  if (!data_channel_factory_) {
243    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
244                    << "are not supported.";
245    return false;
246  }
247
248  if (data_channels_.find(label) != data_channels_.end()) {
249    LOG(LS_ERROR) << "DataChannel with label " << label
250                  << " already exists.";
251    return false;
252  }
253  scoped_refptr<DataChannel> channel(
254      data_channel_factory_->CreateDataChannel(label, &config));
255  data_channels_[label] = channel;
256  stream_observer_->OnAddDataChannel(channel);
257  // It's immediately ready to use.
258  channel->OnChannelReady(true);
259  return true;
260}
261
262bool MediaStreamSignaling::AddLocalStream(MediaStreamInterface* local_stream) {
263  if (local_streams_->find(local_stream->label()) != NULL) {
264    LOG(LS_WARNING) << "MediaStream with label " << local_stream->label()
265                    << "already exist.";
266    return false;
267  }
268  local_streams_->AddStream(local_stream);
269
270  // Find tracks that has already been configured in SDP. This can occur if a
271  // local session description that contains the MSID of these tracks is set
272  // before AddLocalStream is called. It can also occur if the local session
273  // description is not changed and RemoveLocalStream
274  // is called and later AddLocalStream is called again with the same stream.
275  AudioTrackVector audio_tracks = local_stream->GetAudioTracks();
276  for (AudioTrackVector::const_iterator it = audio_tracks.begin();
277       it != audio_tracks.end(); ++it) {
278    TrackInfos::const_iterator track_info_it =
279        local_audio_tracks_.find((*it)->id());
280    if (track_info_it != local_audio_tracks_.end()) {
281      const TrackInfo& info = track_info_it->second;
282      OnLocalTrackSeen(info.stream_label, info.track_id, info.ssrc,
283                       cricket::MEDIA_TYPE_AUDIO);
284    }
285  }
286
287  VideoTrackVector video_tracks = local_stream->GetVideoTracks();
288  for (VideoTrackVector::const_iterator it = video_tracks.begin();
289       it != video_tracks.end(); ++it) {
290    TrackInfos::const_iterator track_info_it =
291        local_video_tracks_.find((*it)->id());
292    if (track_info_it != local_video_tracks_.end()) {
293      const TrackInfo& info = track_info_it->second;
294      OnLocalTrackSeen(info.stream_label, info.track_id, info.ssrc,
295                       cricket::MEDIA_TYPE_VIDEO);
296    }
297  }
298  return true;
299}
300
// Removes |local_stream| from the set of offered local streams and notifies
// the observer.
void MediaStreamSignaling::RemoveLocalStream(
    MediaStreamInterface* local_stream) {
  local_streams_->RemoveStream(local_stream);
  stream_observer_->OnRemoveLocalStream(local_stream);
}
306
307bool MediaStreamSignaling::GetOptionsForOffer(
308    const MediaConstraintsInterface* constraints,
309    cricket::MediaSessionOptions* options) {
310  UpdateSessionOptions();
311  if (!ParseConstraints(constraints, &options_, false)) {
312    return false;
313  }
314  options_.bundle_enabled = EvaluateNeedForBundle(options_);
315  *options = options_;
316  return true;
317}
318
319bool MediaStreamSignaling::GetOptionsForAnswer(
320    const MediaConstraintsInterface* constraints,
321    cricket::MediaSessionOptions* options) {
322  UpdateSessionOptions();
323
324  // Copy the |options_| to not let the flag MediaSessionOptions::has_audio and
325  // MediaSessionOptions::has_video affect subsequent offers.
326  cricket::MediaSessionOptions current_options = options_;
327  if (!ParseConstraints(constraints, &current_options, true)) {
328    return false;
329  }
330  current_options.bundle_enabled = EvaluateNeedForBundle(current_options);
331  *options = current_options;
332  return true;
333}
334
// Updates or creates remote MediaStream objects given a
// remote SessionDescription.
// If the remote SessionDescription contains new remote MediaStreams
// the observer OnAddStream method is called. If a remote MediaStream is missing
// from the remote SessionDescription OnRemoveStream is called.
void MediaStreamSignaling::OnRemoteDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::SessionDescription* remote_desc = desc->description();
  talk_base::scoped_refptr<StreamCollection> new_streams(
      StreamCollection::Create());

  // Find all audio rtp streams and create corresponding remote AudioTracks
  // and MediaStreams.
  const cricket::ContentInfo* audio_content = GetFirstAudioContent(remote_desc);
  if (audio_content) {
    // NOTE: this local |desc| shadows the function parameter.
    const cricket::AudioContentDescription* desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    // A default audio track is needed when the remote side sends audio
    // without signaling any stream (no MSID information).
    remote_info_.default_audio_track_needed =
        desc->direction() == cricket::MD_SENDRECV && desc->streams().empty();
  }

  // Find all video rtp streams and create corresponding remote VideoTracks
  // and MediaStreams.
  const cricket::ContentInfo* video_content = GetFirstVideoContent(remote_desc);
  if (video_content) {
    // NOTE: this local |desc| shadows the function parameter.
    const cricket::VideoContentDescription* desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateRemoteStreamsList(desc->streams(), desc->type(), new_streams);
    remote_info_.default_video_track_needed =
        desc->direction() == cricket::MD_SENDRECV && desc->streams().empty();
  }

  // Update the DataChannels with the information from the remote peer.
  const cricket::ContentInfo* data_content = GetFirstDataContent(remote_desc);
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (data_desc->protocol() == cricket::kMediaProtocolDtlsSctp) {
      UpdateRemoteSctpDataChannels();
    } else {
      UpdateRemoteRtpDataChannels(data_desc->streams());
    }
  }

  // Iterate new_streams and notify the observer about new MediaStreams.
  for (size_t i = 0; i < new_streams->count(); ++i) {
    MediaStreamInterface* new_stream = new_streams->at(i);
    stream_observer_->OnAddRemoteStream(new_stream);
  }

  // Find removed MediaStreams.
  if (remote_info_.IsDefaultMediaStreamNeeded() &&
      remote_streams_->find(kDefaultStreamLabel) != NULL) {
    // The default media stream already exists. No need to do anything.
  } else {
    UpdateEndedRemoteMediaStreams();
    // Seeing any signaled remote stream implies the remote side supports
    // MSID; latch the flag.
    remote_info_.msid_supported |= remote_streams_->count() > 0;
  }
  MaybeCreateDefaultStream();
}
399
// Updates the local track bookkeeping and data channels from a new local
// SessionDescription. Rejected audio/video contents end the corresponding
// remote tracks.
void MediaStreamSignaling::OnLocalDescriptionChanged(
    const SessionDescriptionInterface* desc) {
  const cricket::ContentInfo* audio_content =
      GetFirstAudioContent(desc->description());
  if (audio_content) {
    if (audio_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
    }
    const cricket::AudioContentDescription* audio_desc =
        static_cast<const cricket::AudioContentDescription*>(
            audio_content->description);
    UpdateLocalTracks(audio_desc->streams(), audio_desc->type());
  }

  const cricket::ContentInfo* video_content =
      GetFirstVideoContent(desc->description());
  if (video_content) {
    if (video_content->rejected) {
      RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
    }
    const cricket::VideoContentDescription* video_desc =
        static_cast<const cricket::VideoContentDescription*>(
            video_content->description);
    UpdateLocalTracks(video_desc->streams(), video_desc->type());
  }

  const cricket::ContentInfo* data_content =
      GetFirstDataContent(desc->description());
  if (data_content) {
    const cricket::DataContentDescription* data_desc =
        static_cast<const cricket::DataContentDescription*>(
            data_content->description);
    if (data_desc->protocol() == cricket::kMediaProtocolDtlsSctp) {
      UpdateLocalSctpDataChannels();
    } else {
      UpdateLocalRtpDataChannels(data_desc->streams());
    }
  }
}
439
// Called when the audio channel closes; ends all remote audio tracks.
void MediaStreamSignaling::OnAudioChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_AUDIO);
}
443
// Called when the video channel closes; ends all remote video tracks.
void MediaStreamSignaling::OnVideoChannelClose() {
  RejectRemoteTracks(cricket::MEDIA_TYPE_VIDEO);
}
447
448void MediaStreamSignaling::OnDataChannelClose() {
449  DataChannels::iterator it = data_channels_.begin();
450  for (; it != data_channels_.end(); ++it) {
451    DataChannel* data_channel = it->second;
452    data_channel->OnDataEngineClose();
453  }
454}
455
456bool MediaStreamSignaling::GetRemoteAudioTrackSsrc(
457    const std::string& track_id, uint32* ssrc) const {
458  TrackInfos::const_iterator it = remote_audio_tracks_.find(track_id);
459  if (it == remote_audio_tracks_.end()) {
460    return false;
461  }
462
463  *ssrc = it->second.ssrc;
464  return true;
465}
466
467bool MediaStreamSignaling::GetRemoteVideoTrackSsrc(
468    const std::string& track_id, uint32* ssrc) const {
469  TrackInfos::const_iterator it = remote_video_tracks_.find(track_id);
470  if (it == remote_video_tracks_.end()) {
471    return false;
472  }
473
474  *ssrc = it->second.ssrc;
475  return true;
476}
477
// Rebuilds |options_.streams| from the current local streams and data
// channels. Also latches |options_.has_audio| / |options_.has_video| to true
// when local tracks exist (the flags are never reset here; see the
// ParseConstraints comments).
void MediaStreamSignaling::UpdateSessionOptions() {
  options_.streams.clear();
  if (local_streams_ != NULL) {
    for (size_t i = 0; i < local_streams_->count(); ++i) {
      MediaStreamInterface* stream = local_streams_->at(i);

      AudioTrackVector audio_tracks(stream->GetAudioTracks());
      if (!audio_tracks.empty()) {
        options_.has_audio = true;
      }

      // For each audio track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < audio_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(audio_tracks[j]);
        options_.AddStream(cricket::MEDIA_TYPE_AUDIO, track->id(),
                           stream->label());
      }

      VideoTrackVector video_tracks(stream->GetVideoTracks());
      if (!video_tracks.empty()) {
        options_.has_video = true;
      }
      // For each video track in the stream, add it to the MediaSessionOptions.
      for (size_t j = 0; j < video_tracks.size(); ++j) {
        scoped_refptr<MediaStreamTrackInterface> track(video_tracks[j]);
        options_.AddStream(cricket::MEDIA_TYPE_VIDEO, track->id(),
                           stream->label());
      }
    }
  }

  // Check for data channels.
  DataChannels::const_iterator data_channel_it = data_channels_.begin();
  for (; data_channel_it != data_channels_.end(); ++data_channel_it) {
    const DataChannel* channel = data_channel_it->second;
    if (channel->state() == DataChannel::kConnecting ||
        channel->state() == DataChannel::kOpen) {
      // |streamid| and |sync_label| are both set to the DataChannel label
      // here so they can be signaled the same way as MediaStreams and Tracks.
      // For MediaStreams, the sync_label is the MediaStream label and the
      // track label is the same as |streamid|.
      const std::string& streamid = channel->label();
      const std::string& sync_label = channel->label();
      options_.AddStream(cricket::MEDIA_TYPE_DATA, streamid, sync_label);
    }
  }
}
525
526void MediaStreamSignaling::UpdateRemoteStreamsList(
527    const cricket::StreamParamsVec& streams,
528    cricket::MediaType media_type,
529    StreamCollection* new_streams) {
530  TrackInfos* current_tracks = GetRemoteTracks(media_type);
531
532  // Find removed tracks. Ie tracks where the track id or ssrc don't match the
533  // new StreamParam.
534  TrackInfos::iterator track_it = current_tracks->begin();
535  while (track_it != current_tracks->end()) {
536    TrackInfo info = track_it->second;
537    cricket::StreamParams params;
538    if (!cricket::GetStreamBySsrc(streams, info.ssrc, &params) ||
539        params.id != info.track_id) {
540      OnRemoteTrackRemoved(info.stream_label, info.track_id, media_type);
541      current_tracks->erase(track_it++);
542    } else {
543      ++track_it;
544    }
545  }
546
547  // Find new and active tracks.
548  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
549       it != streams.end(); ++it) {
550    // The sync_label is the MediaStream label and the |stream.id| is the
551    // track id.
552    const std::string& stream_label = it->sync_label;
553    const std::string& track_id = it->id;
554    uint32 ssrc = it->first_ssrc();
555
556    talk_base::scoped_refptr<MediaStreamInterface> stream =
557        remote_streams_->find(stream_label);
558    if (!stream) {
559      // This is a new MediaStream. Create a new remote MediaStream.
560      stream = remote_stream_factory_->CreateMediaStream(stream_label);
561      remote_streams_->AddStream(stream);
562      new_streams->AddStream(stream);
563    }
564
565    TrackInfos::iterator track_it = current_tracks->find(track_id);
566    if (track_it == current_tracks->end()) {
567      (*current_tracks)[track_id] =
568          TrackInfo(stream_label, track_id, ssrc);
569      OnRemoteTrackSeen(stream_label, track_id, it->first_ssrc(), media_type);
570    }
571  }
572}
573
574void MediaStreamSignaling::OnRemoteTrackSeen(const std::string& stream_label,
575                                             const std::string& track_id,
576                                             uint32 ssrc,
577                                             cricket::MediaType media_type) {
578  MediaStreamInterface* stream = remote_streams_->find(stream_label);
579
580  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
581    AudioTrackInterface* audio_track =
582        remote_stream_factory_->AddAudioTrack(stream, track_id);
583    stream_observer_->OnAddRemoteAudioTrack(stream, audio_track, ssrc);
584  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
585    VideoTrackInterface* video_track =
586        remote_stream_factory_->AddVideoTrack(stream, track_id);
587    stream_observer_->OnAddRemoteVideoTrack(stream, video_track, ssrc);
588  } else {
589    ASSERT(false && "Invalid media type");
590  }
591}
592
593void MediaStreamSignaling::OnRemoteTrackRemoved(
594    const std::string& stream_label,
595    const std::string& track_id,
596    cricket::MediaType media_type) {
597  MediaStreamInterface* stream = remote_streams_->find(stream_label);
598
599  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
600    talk_base::scoped_refptr<AudioTrackInterface> audio_track =
601        stream->FindAudioTrack(track_id);
602    audio_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
603    stream->RemoveTrack(audio_track);
604    stream_observer_->OnRemoveRemoteAudioTrack(stream, audio_track);
605  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
606    talk_base::scoped_refptr<VideoTrackInterface> video_track =
607        stream->FindVideoTrack(track_id);
608    video_track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
609    stream->RemoveTrack(video_track);
610    stream_observer_->OnRemoveRemoteVideoTrack(stream, video_track);
611  } else {
612    ASSERT(false && "Invalid media type");
613  }
614}
615
616void MediaStreamSignaling::RejectRemoteTracks(cricket::MediaType media_type) {
617  TrackInfos* current_tracks = GetRemoteTracks(media_type);
618  for (TrackInfos::iterator track_it = current_tracks->begin();
619       track_it != current_tracks->end(); ++track_it) {
620    TrackInfo info = track_it->second;
621    MediaStreamInterface* stream = remote_streams_->find(info.stream_label);
622    if (media_type == cricket::MEDIA_TYPE_AUDIO) {
623      AudioTrackInterface* track = stream->FindAudioTrack(info.track_id);
624      track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
625    }
626    if (media_type == cricket::MEDIA_TYPE_VIDEO) {
627      VideoTrackInterface* track = stream->FindVideoTrack(info.track_id);
628      track->set_state(webrtc::MediaStreamTrackInterface::kEnded);
629    }
630  }
631}
632
633void MediaStreamSignaling::UpdateEndedRemoteMediaStreams() {
634  std::vector<scoped_refptr<MediaStreamInterface> > streams_to_remove;
635  for (size_t i = 0; i < remote_streams_->count(); ++i) {
636    MediaStreamInterface*stream = remote_streams_->at(i);
637    if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) {
638      streams_to_remove.push_back(stream);
639    }
640  }
641
642  std::vector<scoped_refptr<MediaStreamInterface> >::const_iterator it;
643  for (it = streams_to_remove.begin(); it != streams_to_remove.end(); ++it) {
644    remote_streams_->RemoveStream(*it);
645    stream_observer_->OnRemoveRemoteStream(*it);
646  }
647}
648
// Creates the default remote MediaStream (and default audio/video tracks,
// with ssrc 0) when the remote description offers media without signaling
// any streams. No-op if a default stream is not needed. The observer is
// only notified when the stream itself is newly created.
void MediaStreamSignaling::MaybeCreateDefaultStream() {
  if (!remote_info_.IsDefaultMediaStreamNeeded())
    return;

  bool default_created = false;

  scoped_refptr<MediaStreamInterface> default_remote_stream =
      remote_streams_->find(kDefaultStreamLabel);
  if (default_remote_stream == NULL) {
    default_created = true;
    default_remote_stream =
        remote_stream_factory_->CreateMediaStream(kDefaultStreamLabel);
    remote_streams_->AddStream(default_remote_stream);
  }
  if (remote_info_.default_audio_track_needed &&
      default_remote_stream->GetAudioTracks().size() == 0) {
    remote_audio_tracks_[kDefaultAudioTrackLabel] =
        TrackInfo(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0);
    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultAudioTrackLabel, 0,
                       cricket::MEDIA_TYPE_AUDIO);
  }
  if (remote_info_.default_video_track_needed &&
      default_remote_stream->GetVideoTracks().size() == 0) {
    remote_video_tracks_[kDefaultVideoTrackLabel] =
        TrackInfo(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0);
    OnRemoteTrackSeen(kDefaultStreamLabel, kDefaultVideoTrackLabel, 0,
                       cricket::MEDIA_TYPE_VIDEO);
  }
  if (default_created) {
    stream_observer_->OnAddRemoteStream(default_remote_stream);
  }
}
681
682MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetRemoteTracks(
683    cricket::MediaType type) {
684  if (type == cricket::MEDIA_TYPE_AUDIO)
685    return &remote_audio_tracks_;
686  else if (type == cricket::MEDIA_TYPE_VIDEO)
687    return &remote_video_tracks_;
688  ASSERT(false && "Unknown MediaType");
689  return NULL;
690}
691
692MediaStreamSignaling::TrackInfos* MediaStreamSignaling::GetLocalTracks(
693    cricket::MediaType media_type) {
694  ASSERT(media_type == cricket::MEDIA_TYPE_AUDIO ||
695         media_type == cricket::MEDIA_TYPE_VIDEO);
696
697  return (media_type == cricket::MEDIA_TYPE_AUDIO) ?
698      &local_audio_tracks_ : &local_video_tracks_;
699}
700
701void MediaStreamSignaling::UpdateLocalTracks(
702    const std::vector<cricket::StreamParams>& streams,
703    cricket::MediaType media_type) {
704  TrackInfos* current_tracks = GetLocalTracks(media_type);
705
706  // Find removed tracks. Ie tracks where the track id or ssrc don't match the
707  // new StreamParam.
708  TrackInfos::iterator track_it = current_tracks->begin();
709  while (track_it != current_tracks->end()) {
710    TrackInfo info = track_it->second;
711    cricket::StreamParams params;
712    if (!cricket::GetStreamBySsrc(streams, info.ssrc, &params) ||
713        params.id != info.track_id) {
714      OnLocalTrackRemoved(info.stream_label, info.track_id, media_type);
715      current_tracks->erase(track_it++);
716    } else {
717      ++track_it;
718    }
719  }
720
721  // Find new and active tracks.
722  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
723       it != streams.end(); ++it) {
724    // The sync_label is the MediaStream label and the |stream.id| is the
725    // track id.
726    const std::string& stream_label = it->sync_label;
727    const std::string& track_id = it->id;
728    uint32 ssrc = it->first_ssrc();
729    TrackInfos::iterator track_it =  current_tracks->find(track_id);
730    if (track_it == current_tracks->end()) {
731      (*current_tracks)[track_id] =
732          TrackInfo(stream_label, track_id, ssrc);
733      OnLocalTrackSeen(stream_label, track_id, it->first_ssrc(),
734                       media_type);
735    }
736  }
737}
738
739void MediaStreamSignaling::OnLocalTrackSeen(
740    const std::string& stream_label,
741    const std::string& track_id,
742    uint32 ssrc,
743    cricket::MediaType media_type) {
744  MediaStreamInterface* stream = local_streams_->find(stream_label);
745  if (!stream) {
746    LOG(LS_WARNING) << "An unknown local MediaStream with label "
747                    << stream_label <<  " has been configured.";
748    return;
749  }
750
751  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
752    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
753    if (!audio_track) {
754      LOG(LS_WARNING) << "An unknown local AudioTrack with id , "
755                      << track_id <<  " has been configured.";
756      return;
757    }
758    stream_observer_->OnAddLocalAudioTrack(stream, audio_track, ssrc);
759  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
760    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
761    if (!video_track) {
762      LOG(LS_WARNING) << "An unknown local VideoTrack with id , "
763                      << track_id <<  " has been configured.";
764      return;
765    }
766    stream_observer_->OnAddLocalVideoTrack(stream, video_track, ssrc);
767  } else {
768    ASSERT(false && "Invalid media type");
769  }
770}
771
// Notifies the observer that a local track has disappeared from the local
// session description. Silently returns if the stream or track is no longer
// known locally.
void MediaStreamSignaling::OnLocalTrackRemoved(
    const std::string& stream_label,
    const std::string& track_id,
    cricket::MediaType media_type) {
  MediaStreamInterface* stream = local_streams_->find(stream_label);
  if (!stream) {
    // This is the normal case. Ie RemoveLocalStream has been called and the
    // SessionDescriptions has been renegotiated.
    return;
  }
  // A track has been removed from the SessionDescription but the MediaStream
  // is still associated with MediaStreamSignaling. This only occurs if the SDP
  // doesn't match with the calls to AddLocalStream and RemoveLocalStream.

  if (media_type == cricket::MEDIA_TYPE_AUDIO) {
    AudioTrackInterface* audio_track = stream->FindAudioTrack(track_id);
    if (!audio_track) {
      return;
    }
    stream_observer_->OnRemoveLocalAudioTrack(stream, audio_track);
  } else if (media_type == cricket::MEDIA_TYPE_VIDEO) {
    VideoTrackInterface* video_track = stream->FindVideoTrack(track_id);
    if (!video_track) {
      return;
    }
    stream_observer_->OnRemoveLocalVideoTrack(stream, video_track);
  } else {
    ASSERT(false && "Invalid media type.");
  }
}
802
803void MediaStreamSignaling::UpdateLocalRtpDataChannels(
804    const cricket::StreamParamsVec& streams) {
805  std::vector<std::string> existing_channels;
806
807  // Find new and active data channels.
808  for (cricket::StreamParamsVec::const_iterator it =streams.begin();
809       it != streams.end(); ++it) {
810    // |it->sync_label| is actually the data channel label. The reason is that
811    // we use the same naming of data channels as we do for
812    // MediaStreams and Tracks.
813    // For MediaStreams, the sync_label is the MediaStream label and the
814    // track label is the same as |streamid|.
815    const std::string& channel_label = it->sync_label;
816    DataChannels::iterator data_channel_it = data_channels_.find(channel_label);
817    if (!VERIFY(data_channel_it != data_channels_.end())) {
818      continue;
819    }
820    // Set the SSRC the data channel should use for sending.
821    data_channel_it->second->SetSendSsrc(it->first_ssrc());
822    existing_channels.push_back(data_channel_it->first);
823  }
824
825  UpdateClosingDataChannels(existing_channels, true);
826}
827
828void MediaStreamSignaling::UpdateRemoteRtpDataChannels(
829    const cricket::StreamParamsVec& streams) {
830  std::vector<std::string> existing_channels;
831
832  // Find new and active data channels.
833  for (cricket::StreamParamsVec::const_iterator it = streams.begin();
834       it != streams.end(); ++it) {
835    // The data channel label is either the mslabel or the SSRC if the mslabel
836    // does not exist. Ex a=ssrc:444330170 mslabel:test1.
837    std::string label = it->sync_label.empty() ?
838        talk_base::ToString(it->first_ssrc()) : it->sync_label;
839    DataChannels::iterator data_channel_it =
840        data_channels_.find(label);
841    if (data_channel_it == data_channels_.end()) {
842      // This is a new data channel.
843      CreateRemoteDataChannel(label, it->first_ssrc());
844    } else {
845      data_channel_it->second->SetReceiveSsrc(it->first_ssrc());
846    }
847    existing_channels.push_back(label);
848  }
849
850  UpdateClosingDataChannels(existing_channels, false);
851}
852
853void MediaStreamSignaling::UpdateClosingDataChannels(
854    const std::vector<std::string>& active_channels, bool is_local_update) {
855  DataChannels::iterator it = data_channels_.begin();
856  while (it != data_channels_.end()) {
857    DataChannel* data_channel = it->second;
858    if (std::find(active_channels.begin(), active_channels.end(),
859                  data_channel->label()) != active_channels.end()) {
860      ++it;
861      continue;
862    }
863
864    if (is_local_update)
865      data_channel->SetSendSsrc(0);
866    else
867      data_channel->RemotePeerRequestClose();
868
869    if (data_channel->state() == DataChannel::kClosed) {
870      data_channels_.erase(it);
871      it = data_channels_.begin();
872    } else {
873      ++it;
874    }
875  }
876}
877
878void MediaStreamSignaling::CreateRemoteDataChannel(const std::string& label,
879                                                   uint32 remote_ssrc) {
880  if (!data_channel_factory_) {
881    LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
882                    << "are not supported.";
883    return;
884  }
885  scoped_refptr<DataChannel> channel(
886      data_channel_factory_->CreateDataChannel(label, NULL));
887  channel->SetReceiveSsrc(remote_ssrc);
888  stream_observer_->OnAddDataChannel(channel);
889}
890
891
// Format defined at
// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
// First byte of a DATA_CHANNEL_OPEN control message.
const uint8 DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;

// Channel-type byte of the OPEN message. The low bits select the reliability
// mode; the high bit (0x80) marks the channel as unordered.
enum DataChannelOpenMessageChannelType {
  DCOMCT_ORDERED_RELIABLE = 0x00,
  DCOMCT_ORDERED_PARTIAL_RTXS = 0x01,
  DCOMCT_ORDERED_PARTIAL_TIME = 0x02,
  DCOMCT_UNORDERED_RELIABLE = 0x80,
  DCOMCT_UNORDERED_PARTIAL_RTXS = 0x81,
  DCOMCT_UNORDERED_PARTIAL_TIME = 0x82,
};
904
905bool MediaStreamSignaling::ParseDataChannelOpenMessage(
906    const talk_base::Buffer& payload,
907    std::string* label,
908    DataChannelInit* config) {
909  // Format defined at
910  // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
911
912  talk_base::ByteBuffer buffer(payload.data(), payload.length());
913
914  uint8 message_type;
915  if (!buffer.ReadUInt8(&message_type)) {
916    LOG(LS_WARNING) << "Could not read OPEN message type.";
917    return false;
918  }
919  if (message_type != DATA_CHANNEL_OPEN_MESSAGE_TYPE) {
920    LOG(LS_WARNING) << "Data Channel OPEN message of unexpected type: "
921                    << message_type;
922    return false;
923  }
924
925  uint8 channel_type;
926  if (!buffer.ReadUInt8(&channel_type)) {
927    LOG(LS_WARNING) << "Could not read OPEN message channel type.";
928    return false;
929  }
930  uint16 priority;
931  if (!buffer.ReadUInt16(&priority)) {
932    LOG(LS_WARNING) << "Could not read OPEN message reliabilility prioirty.";
933    return false;
934  }
935  uint32 reliability_param;
936  if (!buffer.ReadUInt32(&reliability_param)) {
937    LOG(LS_WARNING) << "Could not read OPEN message reliabilility param.";
938    return false;
939  }
940  uint16 label_length;
941  if (!buffer.ReadUInt16(&label_length)) {
942    LOG(LS_WARNING) << "Could not read OPEN message label length.";
943    return false;
944  }
945  uint16 protocol_length;
946  if (!buffer.ReadUInt16(&protocol_length)) {
947    LOG(LS_WARNING) << "Could not read OPEN message protocol length.";
948    return false;
949  }
950  if (!buffer.ReadString(label, (size_t) label_length)) {
951    LOG(LS_WARNING) << "Could not read OPEN message label";
952    return false;
953  }
954  if (!buffer.ReadString(&config->protocol, protocol_length)) {
955    LOG(LS_WARNING) << "Could not read OPEN message protocol.";
956    return false;
957  }
958
959  config->ordered = true;
960  switch (channel_type) {
961    case DCOMCT_UNORDERED_RELIABLE:
962    case DCOMCT_UNORDERED_PARTIAL_RTXS:
963    case DCOMCT_UNORDERED_PARTIAL_TIME:
964      config->ordered = false;
965  }
966
967  config->maxRetransmits = -1;
968  config->maxRetransmitTime = -1;
969  switch (channel_type) {
970    case DCOMCT_ORDERED_PARTIAL_RTXS:
971    case DCOMCT_UNORDERED_PARTIAL_RTXS:
972      config->maxRetransmits = reliability_param;
973
974    case DCOMCT_ORDERED_PARTIAL_TIME:
975    case DCOMCT_UNORDERED_PARTIAL_TIME:
976      config->maxRetransmitTime = reliability_param;
977  }
978
979  return true;
980}
981
982bool MediaStreamSignaling::WriteDataChannelOpenMessage(
983    const std::string& label,
984    const DataChannelInit& config,
985    talk_base::Buffer* payload) {
986  // Format defined at
987  // http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
988  // TODO(pthatcher)
989
990  uint8 channel_type = 0;
991  uint32 reliability_param = 0;
992  uint16 priority = 0;
993  if (config.ordered) {
994    if (config.maxRetransmits > -1) {
995      channel_type = DCOMCT_ORDERED_PARTIAL_RTXS;
996      reliability_param = config.maxRetransmits;
997    } else if (config.maxRetransmitTime > -1) {
998      channel_type = DCOMCT_ORDERED_PARTIAL_TIME;
999      reliability_param = config.maxRetransmitTime;
1000    } else {
1001      channel_type = DCOMCT_ORDERED_RELIABLE;
1002    }
1003  } else {
1004    if (config.maxRetransmits > -1) {
1005      channel_type = DCOMCT_UNORDERED_PARTIAL_RTXS;
1006      reliability_param = config.maxRetransmits;
1007    } else if (config.maxRetransmitTime > -1) {
1008      channel_type = DCOMCT_UNORDERED_PARTIAL_TIME;
1009      reliability_param = config.maxRetransmitTime;
1010    } else {
1011      channel_type = DCOMCT_UNORDERED_RELIABLE;
1012    }
1013  }
1014
1015  talk_base::ByteBuffer buffer(
1016      NULL, 20 + label.length() + config.protocol.length(),
1017      talk_base::ByteBuffer::ORDER_NETWORK);
1018  buffer.WriteUInt8(DATA_CHANNEL_OPEN_MESSAGE_TYPE);
1019  buffer.WriteUInt8(channel_type);
1020  buffer.WriteUInt16(priority);
1021  buffer.WriteUInt32(reliability_param);
1022  buffer.WriteUInt16(static_cast<uint16>(label.length()));
1023  buffer.WriteUInt16(static_cast<uint16>(config.protocol.length()));
1024  buffer.WriteString(label);
1025  buffer.WriteString(config.protocol);
1026  payload->SetData(buffer.Data(), buffer.Length());
1027  return true;
1028}
1029
1030void MediaStreamSignaling::UpdateLocalSctpDataChannels() {
1031  DataChannels::iterator it = data_channels_.begin();
1032  for (; it != data_channels_.end(); ++it) {
1033    DataChannel* data_channel = it->second;
1034    data_channel->SetSendSsrc(data_channel->id());
1035  }
1036}
1037
1038void MediaStreamSignaling::UpdateRemoteSctpDataChannels() {
1039  DataChannels::iterator it = data_channels_.begin();
1040  for (; it != data_channels_.end(); ++it) {
1041    DataChannel* data_channel = it->second;
1042    data_channel->SetReceiveSsrc(data_channel->id());
1043  }
1044}
1045
1046}  // namespace webrtc
1047