/*
 * libjingle
 * Copyright 2012, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <map>
#include <string>

#include "talk/app/webrtc/audiotrack.h"
#include "talk/app/webrtc/mediastream.h"
#include "talk/app/webrtc/mediastreamsignaling.h"
#include "talk/app/webrtc/streamcollection.h"
#include "talk/app/webrtc/test/fakeconstraints.h"
#include "talk/app/webrtc/test/fakedatachannelprovider.h"
#include "talk/app/webrtc/videotrack.h"
#include "talk/base/gunit.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/stringutils.h"
#include "talk/base/thread.h"
#include "talk/media/base/fakemediaengine.h"
#include "talk/media/devices/fakedevicemanager.h"
#include "talk/p2p/base/constants.h"
#include "talk/p2p/base/sessiondescription.h"
#include "talk/session/media/channelmanager.h"

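// Stream labels and track ids used by the tests in this file.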
static const char kStreams[][8] = {"stream1", "stream2"};
static const char kAudioTracks[][32] = {"audiotrack0", "audiotrack1"};
static const char kVideoTracks[][32] = {"videotrack0", "videotrack1"};

using webrtc::AudioTrack;
using webrtc::AudioTrackInterface;
using webrtc::AudioTrackVector;
using webrtc::VideoTrack;
using webrtc::VideoTrackInterface;
using webrtc::VideoTrackVector;
using webrtc::DataChannelInterface;
using webrtc::FakeConstraints;
using webrtc::IceCandidateInterface;
using webrtc::MediaConstraintsInterface;
using webrtc::MediaStreamInterface;
using webrtc::MediaStreamTrackInterface;
using webrtc::SdpParseError;
using webrtc::SessionDescriptionInterface;
using webrtc::StreamCollection;
using webrtc::StreamCollectionInterface;

// Reference SDP with a MediaStream with label "stream1", an audio track with
// id "audiotrack0" and a video track with id "videotrack0".
static const char kSdpStringWithStream1[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 mslabel:stream1\r\n"
    "a=ssrc:1 label:audiotrack0\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 mslabel:stream1\r\n"
    "a=ssrc:2 label:videotrack0\r\n";

// Reference SDP with two MediaStreams with labels "stream1" and "stream2".
// Each MediaStream has one audio track and one video track.
// This uses MSID.
static const char kSdpStringWith2Stream[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS stream1 stream2\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n"
    "a=ssrc:3 cname:stream2\r\n"
    "a=ssrc:3 msid:stream2 audiotrack1\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n"
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n"
    "a=ssrc:4 cname:stream2\r\n"
    "a=ssrc:4 msid:stream2 videotrack1\r\n";

// Reference SDP without MediaStreams. Msid is not supported.
static const char kSdpStringWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

// Reference SDP without MediaStreams. Msid is supported.
static const char kSdpStringWithMsidWithoutStreams[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n"
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

// Reference SDP without MediaStreams; audio only.
static const char kSdpStringWithoutStreamsAudioOnly[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";

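// SDP fragments used by CreateSessionDescriptionAndReference() to build
// session descriptions with a varying number of tracks in MediaStream
// "stream1".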
static const char kSdpStringInit[] =
    "v=0\r\n"
    "o=- 0 0 IN IP4 127.0.0.1\r\n"
    "s=-\r\n"
    "t=0 0\r\n"
    "a=msid-semantic: WMS\r\n";

static const char kSdpStringAudio[] =
    "m=audio 1 RTP/AVPF 103\r\n"
    "a=mid:audio\r\n"
    "a=rtpmap:103 ISAC/16000\r\n";

static const char kSdpStringVideo[] =
    "m=video 1 RTP/AVPF 120\r\n"
    "a=mid:video\r\n"
    "a=rtpmap:120 VP8/90000\r\n";

static const char kSdpStringMs1Audio0[] =
    "a=ssrc:1 cname:stream1\r\n"
    "a=ssrc:1 msid:stream1 audiotrack0\r\n";

static const char kSdpStringMs1Video0[] =
    "a=ssrc:2 cname:stream1\r\n"
    "a=ssrc:2 msid:stream1 videotrack0\r\n";

static const char kSdpStringMs1Audio1[] =
    "a=ssrc:3 cname:stream1\r\n"
    "a=ssrc:3 msid:stream1 audiotrack1\r\n";

static const char kSdpStringMs1Video1[] =
    "a=ssrc:4 cname:stream1\r\n"
    "a=ssrc:4 msid:stream1 videotrack1\r\n";

// Verifies that |options| contains all tracks in |collection| and that the
// has_audio and has_video flags in |options| are set correctly.
static void VerifyMediaOptions(StreamCollectionInterface* collection,
                               const cricket::MediaSessionOptions& options) {
  if (!collection) {
    return;
  }

  size_t stream_index = 0;
  for (size_t i = 0; i < collection->count(); ++i) {
    MediaStreamInterface* stream = collection->at(i);
    AudioTrackVector audio_tracks = stream->GetAudioTracks();
    ASSERT_GE(options.streams.size(), stream_index + audio_tracks.size());
    for (size_t j = 0; j < audio_tracks.size(); ++j) {
      webrtc::AudioTrackInterface* audio = audio_tracks[j];
      EXPECT_EQ(options.streams[stream_index].sync_label, stream->label());
      EXPECT_EQ(options.streams[stream_index++].id, audio->id());
      EXPECT_TRUE(options.has_audio);
    }
    VideoTrackVector video_tracks = stream->GetVideoTracks();
    ASSERT_GE(options.streams.size(), stream_index + video_tracks.size());
    for (size_t j = 0; j < video_tracks.size(); ++j) {
      webrtc::VideoTrackInterface* video = video_tracks[j];
      EXPECT_EQ(options.streams[stream_index].sync_label, stream->label());
      EXPECT_EQ(options.streams[stream_index++].id, video->id());
      EXPECT_TRUE(options.has_video);
    }
  }
}

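// Returns true if |s1| and |s2| contain the same MediaStreams in the same
// order, with matching audio and video track ids.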
static bool CompareStreamCollections(StreamCollectionInterface* s1,
                                     StreamCollectionInterface* s2) {
  if (s1 == NULL || s2 == NULL || s1->count() != s2->count())
    return false;

  for (size_t i = 0; i != s1->count(); ++i) {
    if (s1->at(i)->label() != s2->at(i)->label())
      return false;
    webrtc::AudioTrackVector audio_tracks1 = s1->at(i)->GetAudioTracks();
    webrtc::AudioTrackVector audio_tracks2 = s2->at(i)->GetAudioTracks();
    webrtc::VideoTrackVector video_tracks1 = s1->at(i)->GetVideoTracks();
    webrtc::VideoTrackVector video_tracks2 = s2->at(i)->GetVideoTracks();

    if (audio_tracks1.size() != audio_tracks2.size())
      return false;
    for (size_t j = 0; j != audio_tracks1.size(); ++j) {
      if (audio_tracks1[j]->id() != audio_tracks2[j]->id())
        return false;
    }
    if (video_tracks1.size() != video_tracks2.size())
      return false;
    for (size_t j = 0; j != video_tracks1.size(); ++j) {
      if (video_tracks1[j]->id() != video_tracks2[j]->id())
        return false;
    }
  }
  return true;
}

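// DataChannelFactory implementation that creates DataChannels of a fixed type
// backed by a FakeDataChannelProvider.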
class FakeDataChannelFactory : public webrtc::DataChannelFactory {
 public:
  FakeDataChannelFactory(FakeDataChannelProvider* provider,
                         cricket::DataChannelType dct)
      : provider_(provider), type_(dct) {}

  virtual talk_base::scoped_refptr<webrtc::DataChannel> CreateDataChannel(
      const std::string& label,
      const webrtc::DataChannelInit* config) {
    return webrtc::DataChannel::Create(provider_, type_, label, config);
  }

 private:
  FakeDataChannelProvider* provider_;
  cricket::DataChannelType type_;
};

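// MediaStreamSignalingObserver implementation that records the remote streams
// and the local and remote tracks it is notified about, so that the tests can
// verify them.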
class MockSignalingObserver : public webrtc::MediaStreamSignalingObserver {
 public:
  MockSignalingObserver()
      : remote_media_streams_(StreamCollection::Create()) {
  }

  virtual ~MockSignalingObserver() {
  }

  // A new remote stream has been discovered.
  virtual void OnAddRemoteStream(MediaStreamInterface* remote_stream) {
    remote_media_streams_->AddStream(remote_stream);
  }

  // A remote stream is no longer available.
  virtual void OnRemoveRemoteStream(MediaStreamInterface* remote_stream) {
    remote_media_streams_->RemoveStream(remote_stream);
  }

  virtual void OnAddDataChannel(DataChannelInterface* data_channel) {
  }

  virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
                                    AudioTrackInterface* audio_track,
                                    uint32 ssrc) {
    AddTrack(&local_audio_tracks_, stream, audio_track, ssrc);
  }

  virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
                                    VideoTrackInterface* video_track,
                                    uint32 ssrc) {
    AddTrack(&local_video_tracks_, stream, video_track, ssrc);
  }

  virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
                                       AudioTrackInterface* audio_track) {
    RemoveTrack(&local_audio_tracks_, stream, audio_track);
  }

  virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
                                       VideoTrackInterface* video_track) {
    RemoveTrack(&local_video_tracks_, stream, video_track);
  }

  virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
                                     AudioTrackInterface* audio_track,
                                     uint32 ssrc) {
    AddTrack(&remote_audio_tracks_, stream, audio_track, ssrc);
  }

  virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
                                     VideoTrackInterface* video_track,
                                     uint32 ssrc) {
    AddTrack(&remote_video_tracks_, stream, video_track, ssrc);
  }

  virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
                                        AudioTrackInterface* audio_track) {
    RemoveTrack(&remote_audio_tracks_, stream, audio_track);
  }

  virtual void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
                                        VideoTrackInterface* video_track) {
    RemoveTrack(&remote_video_tracks_, stream, video_track);
  }

  virtual void OnRemoveLocalStream(MediaStreamInterface* stream) {
  }

  MediaStreamInterface* RemoteStream(const std::string& label) {
    return remote_media_streams_->find(label);
  }

  StreamCollectionInterface* remote_streams() const {
    return remote_media_streams_;
  }

  size_t NumberOfRemoteAudioTracks() { return remote_audio_tracks_.size(); }

  void  VerifyRemoteAudioTrack(const std::string& stream_label,
                               const std::string& track_id,
                               uint32 ssrc) {
    VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc);
  }

  size_t NumberOfRemoteVideoTracks() { return remote_video_tracks_.size(); }

  void  VerifyRemoteVideoTrack(const std::string& stream_label,
                               const std::string& track_id,
                               uint32 ssrc) {
    VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc);
  }

  size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); }
  void  VerifyLocalAudioTrack(const std::string& stream_label,
                              const std::string& track_id,
                              uint32 ssrc) {
    VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc);
  }

  size_t NumberOfLocalVideoTracks() { return local_video_tracks_.size(); }

  void  VerifyLocalVideoTrack(const std::string& stream_label,
                              const std::string& track_id,
                              uint32 ssrc) {
    VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc);
  }

 private:
  struct TrackInfo {
    TrackInfo() {}
    TrackInfo(const std::string& stream_label, const std::string& track_id,
              uint32 ssrc)
        : stream_label(stream_label),
          track_id(track_id),
          ssrc(ssrc) {
    }
    std::string stream_label;
    std::string track_id;
    uint32 ssrc;
  };
  typedef std::map<std::string, TrackInfo> TrackInfos;

  void AddTrack(TrackInfos* track_infos, MediaStreamInterface* stream,
                MediaStreamTrackInterface* track,
                uint32 ssrc) {
    (*track_infos)[track->id()] = TrackInfo(stream->label(), track->id(),
                                            ssrc);
  }

  void RemoveTrack(TrackInfos* track_infos, MediaStreamInterface* stream,
                   MediaStreamTrackInterface* track) {
    TrackInfos::iterator it = track_infos->find(track->id());
    ASSERT_TRUE(it != track_infos->end());
    ASSERT_EQ(it->second.stream_label, stream->label());
    track_infos->erase(it);
  }

  void VerifyTrack(const TrackInfos& track_infos,
                   const std::string& stream_label,
                   const std::string& track_id,
                   uint32 ssrc) {
    TrackInfos::const_iterator it = track_infos.find(track_id);
    ASSERT_TRUE(it != track_infos.end());
    EXPECT_EQ(stream_label, it->second.stream_label);
    EXPECT_EQ(ssrc, it->second.ssrc);
  }

  TrackInfos remote_audio_tracks_;
  TrackInfos remote_video_tracks_;
  TrackInfos local_audio_tracks_;
  TrackInfos local_video_tracks_;

  talk_base::scoped_refptr<StreamCollection> remote_media_streams_;
};

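// MediaStreamSignaling subclass that makes selected base-class members
// accessible to the tests.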
class MediaStreamSignalingForTest : public webrtc::MediaStreamSignaling {
 public:
  MediaStreamSignalingForTest(MockSignalingObserver* observer,
                              cricket::ChannelManager* channel_manager)
      : webrtc::MediaStreamSignaling(talk_base::Thread::Current(), observer,
                                     channel_manager) {
  }

  using webrtc::MediaStreamSignaling::GetOptionsForOffer;
  using webrtc::MediaStreamSignaling::GetOptionsForAnswer;
  using webrtc::MediaStreamSignaling::OnRemoteDescriptionChanged;
  using webrtc::MediaStreamSignaling::remote_streams;
};

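// Test fixture that creates a MediaStreamSignalingForTest instance connected
// to a MockSignalingObserver and a ChannelManager backed by fake media and
// device managers.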
class MediaStreamSignalingTest : public testing::Test {
 protected:
  virtual void SetUp() {
    observer_.reset(new MockSignalingObserver());
    channel_manager_.reset(
        new cricket::ChannelManager(new cricket::FakeMediaEngine(),
                                    new cricket::FakeDeviceManager(),
                                    talk_base::Thread::Current()));
    signaling_.reset(new MediaStreamSignalingForTest(observer_.get(),
                                                     channel_manager_.get()));
    data_channel_provider_.reset(new FakeDataChannelProvider());
  }

  // Create a collection of streams.
  // CreateStreamCollection(1) creates a collection that corresponds to
  // kSdpStringWithStream1.
  // CreateStreamCollection(2) corresponds to kSdpStringWith2Stream.
  talk_base::scoped_refptr<StreamCollection>
  CreateStreamCollection(int number_of_streams) {
    talk_base::scoped_refptr<StreamCollection> local_collection(
        StreamCollection::Create());

    for (int i = 0; i < number_of_streams; ++i) {
      talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream(
          webrtc::MediaStream::Create(kStreams[i]));

      // Add a local audio track.
      talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
          webrtc::AudioTrack::Create(kAudioTracks[i], NULL));
      stream->AddTrack(audio_track);

      // Add a local video track.
      talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track(
          webrtc::VideoTrack::Create(kVideoTracks[i], NULL));
      stream->AddTrack(video_track);

      local_collection->AddStream(stream);
    }
    return local_collection;
  }

  // This function creates a MediaStream with label kStreams[0],
  // |number_of_audio_tracks| audio tracks and |number_of_video_tracks| video
  // tracks, and the corresponding SessionDescriptionInterface. The
  // SessionDescriptionInterface is returned in |desc| and the MediaStream is
  // stored in |reference_collection_|.
  void CreateSessionDescriptionAndReference(
      size_t number_of_audio_tracks,
      size_t number_of_video_tracks,
      SessionDescriptionInterface** desc) {
    ASSERT_TRUE(desc != NULL);
    ASSERT_LE(number_of_audio_tracks, 2u);
    ASSERT_LE(number_of_video_tracks, 2u);

    reference_collection_ = StreamCollection::Create();
    std::string sdp_ms1 = std::string(kSdpStringInit);

    std::string mediastream_label = kStreams[0];

    talk_base::scoped_refptr<webrtc::MediaStreamInterface> stream(
            webrtc::MediaStream::Create(mediastream_label));
    reference_collection_->AddStream(stream);

    if (number_of_audio_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringAudio);
      sdp_ms1 += std::string(kSdpStringMs1Audio0);
      AddAudioTrack(kAudioTracks[0], stream);
    }
    if (number_of_audio_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Audio1;
      AddAudioTrack(kAudioTracks[1], stream);
    }

    if (number_of_video_tracks > 0) {
      sdp_ms1 += std::string(kSdpStringVideo);
      sdp_ms1 += std::string(kSdpStringMs1Video0);
      AddVideoTrack(kVideoTracks[0], stream);
    }
    if (number_of_video_tracks > 1) {
      sdp_ms1 += kSdpStringMs1Video1;
      AddVideoTrack(kVideoTracks[1], stream);
    }

    *desc = webrtc::CreateSessionDescription(
        SessionDescriptionInterface::kOffer, sdp_ms1, NULL);
  }

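  // Creates an audio track with |track_id| and adds it to |stream|.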
  void AddAudioTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    talk_base::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
        webrtc::AudioTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(audio_track));
  }

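  // Creates a video track with |track_id| and adds it to |stream|.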
  void AddVideoTrack(const std::string& track_id,
                     MediaStreamInterface* stream) {
    talk_base::scoped_refptr<webrtc::VideoTrackInterface> video_track(
        webrtc::VideoTrack::Create(track_id, NULL));
    ASSERT_TRUE(stream->AddTrack(video_track));
  }

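  // Creates a DataChannel of |type| with |label| and |id|, backed by
  // |data_channel_provider_|, and registers it with |signaling_|.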
  talk_base::scoped_refptr<webrtc::DataChannel> AddDataChannel(
      cricket::DataChannelType type, const std::string& label, int id) {
    webrtc::DataChannelInit config;
    config.id = id;
    talk_base::scoped_refptr<webrtc::DataChannel> data_channel(
        webrtc::DataChannel::Create(
            data_channel_provider_.get(), type, label, &config));
    EXPECT_TRUE(data_channel.get() != NULL);
    EXPECT_TRUE(signaling_->AddDataChannel(data_channel.get()));
    return data_channel;
  }

  // ChannelManager is used by VideoSource, so it should be released after all
  // the video tracks. Making it the first member variable should ensure that.
  talk_base::scoped_ptr<cricket::ChannelManager> channel_manager_;
  talk_base::scoped_refptr<StreamCollection> reference_collection_;
  talk_base::scoped_ptr<MockSignalingObserver> observer_;
  talk_base::scoped_ptr<MediaStreamSignalingForTest> signaling_;
  talk_base::scoped_ptr<FakeDataChannelProvider> data_channel_provider_;
};

// Test that a MediaSessionOptions is created for an offer if
// kOfferToReceiveAudio and kOfferToReceiveVideo constraints are set but no
// MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudioVideo) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  constraints.SetMandatoryReceiveVideo(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_TRUE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created for an offer if the
// kOfferToReceiveAudio constraint is set but no MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithAudio) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_FALSE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created for an offer if
// no constraints or MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetDefaultMediaSessionOptionsForOffer) {
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_FALSE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created for an offer if the
// kOfferToReceiveVideo constraint is set but no MediaStreams are sent.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsForOfferWithVideo) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(false);
  constraints.SetMandatoryReceiveVideo(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_FALSE(options.has_audio);
  EXPECT_TRUE(options.has_video);
  EXPECT_TRUE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created for an offer if
// the kUseRtpMux constraint is set to false.
TEST_F(MediaStreamSignalingTest,
       GetMediaSessionOptionsForOfferWithBundleDisabled) {
  FakeConstraints constraints;
  constraints.SetMandatoryReceiveAudio(true);
  constraints.SetMandatoryReceiveVideo(true);
  constraints.SetMandatoryUseRtpMux(false);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.has_audio);
  EXPECT_TRUE(options.has_video);
  EXPECT_FALSE(options.bundle_enabled);
}

// Test that a correct MediaSessionOptions is created to restart ICE if the
// kIceRestart constraint is set. It also tests that subsequent
// MediaSessionOptions don't have |transport_options.ice_restart| set.
TEST_F(MediaStreamSignalingTest,
       GetMediaSessionOptionsForOfferWithIceRestart) {
  FakeConstraints constraints;
  constraints.SetMandatoryIceRestart(true);
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&constraints, &options));
  EXPECT_TRUE(options.transport_options.ice_restart);

  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options));
  EXPECT_FALSE(options.transport_options.ice_restart);
}

// Test that GetOptionsForOffer and GetOptionsForAnswer work as expected if
// unknown constraints are used.
TEST_F(MediaStreamSignalingTest, GetMediaSessionOptionsWithBadConstraints) {
  FakeConstraints mandatory;
  mandatory.AddMandatory("bad_key", "bad_value");
  cricket::MediaSessionOptions options;
  EXPECT_FALSE(signaling_->GetOptionsForOffer(&mandatory, &options));
  EXPECT_FALSE(signaling_->GetOptionsForAnswer(&mandatory, &options));

  FakeConstraints optional;
  optional.AddOptional("bad_key", "bad_value");
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&optional, &options));
  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&optional, &options));
}

// Test that a correct MediaSessionOptions is created for an offer if
// a MediaStream is sent and later updated with a new track.
// MediaConstraints are not used.
TEST_F(MediaStreamSignalingTest, AddTrackToLocalMediaStream) {
  talk_base::scoped_refptr<StreamCollection> local_streams(
      CreateStreamCollection(1));
  MediaStreamInterface* local_stream = local_streams->at(0);
  EXPECT_TRUE(signaling_->AddLocalStream(local_stream));
  cricket::MediaSessionOptions options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &options));
  VerifyMediaOptions(local_streams, options);

  cricket::MediaSessionOptions updated_options;
  local_stream->AddTrack(AudioTrack::Create(kAudioTracks[1], NULL));
  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL, &updated_options));
  VerifyMediaOptions(local_streams, updated_options);
}

// Test that the MediaConstraints used in an answer don't affect whether audio
// or video is offered in an offer, but that if the kOfferToReceiveAudio or
// kOfferToReceiveVideo constraints are true in an offer, the media type will
// be included in subsequent answers.
TEST_F(MediaStreamSignalingTest, MediaConstraintsInAnswer) {
  FakeConstraints answer_c;
  answer_c.SetMandatoryReceiveAudio(true);
  answer_c.SetMandatoryReceiveVideo(true);

  cricket::MediaSessionOptions answer_options;
  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&answer_c, &answer_options));
  EXPECT_TRUE(answer_options.has_audio);
  EXPECT_TRUE(answer_options.has_video);

  FakeConstraints offer_c;
  offer_c.SetMandatoryReceiveAudio(false);
  offer_c.SetMandatoryReceiveVideo(false);

  cricket::MediaSessionOptions offer_options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&offer_c, &offer_options));
  EXPECT_FALSE(offer_options.has_audio);
  EXPECT_FALSE(offer_options.has_video);

  FakeConstraints updated_offer_c;
  updated_offer_c.SetMandatoryReceiveAudio(true);
  updated_offer_c.SetMandatoryReceiveVideo(true);

  cricket::MediaSessionOptions updated_offer_options;
  EXPECT_TRUE(signaling_->GetOptionsForOffer(&updated_offer_c,
                                             &updated_offer_options));
  EXPECT_TRUE(updated_offer_options.has_audio);
  EXPECT_TRUE(updated_offer_options.has_video);

  // Since an offer has been created with both audio and video, subsequent
  // offers and answers should contain both audio and video.
  // Answers will only contain the media types that exist in the offer
  // regardless of the value of |updated_answer_options.has_audio| and
  // |updated_answer_options.has_video|.
  FakeConstraints updated_answer_c;
  updated_answer_c.SetMandatoryReceiveAudio(false);
  updated_answer_c.SetMandatoryReceiveVideo(false);

  cricket::MediaSessionOptions updated_answer_options;
  EXPECT_TRUE(signaling_->GetOptionsForAnswer(&updated_answer_c,
                                              &updated_answer_options));
  EXPECT_TRUE(updated_answer_options.has_audio);
  EXPECT_TRUE(updated_answer_options.has_video);

  EXPECT_TRUE(signaling_->GetOptionsForOffer(NULL,
                                             &updated_offer_options));
  EXPECT_TRUE(updated_offer_options.has_audio);
  EXPECT_TRUE(updated_offer_options.has_video);
}

// This test verifies that the remote MediaStreams corresponding to a received
// SDP string are created. In this test two separate MediaStreams are
// signaled.
TEST_F(MediaStreamSignalingTest, UpdateRemoteStreams) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1, NULL));
  EXPECT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  talk_base::scoped_refptr<StreamCollection> reference(
      CreateStreamCollection(1));
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference.get()));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference.get()));
  EXPECT_EQ(1u, observer_->NumberOfRemoteAudioTracks());
  observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
  EXPECT_EQ(1u, observer_->NumberOfRemoteVideoTracks());
  observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);
  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  EXPECT_TRUE(remote_stream->GetVideoTracks()[0]->GetSource() != NULL);

  // Create a session description based on another SDP with another
  // MediaStream.
  talk_base::scoped_ptr<SessionDescriptionInterface> update_desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWith2Stream, NULL));
  EXPECT_TRUE(update_desc != NULL);
  signaling_->OnRemoteDescriptionChanged(update_desc.get());

  talk_base::scoped_refptr<StreamCollection> reference2(
      CreateStreamCollection(2));
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference2.get()));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference2.get()));

  EXPECT_EQ(2u, observer_->NumberOfRemoteAudioTracks());
  observer_->VerifyRemoteAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyRemoteAudioTrack(kStreams[1], kAudioTracks[1], 3);
  EXPECT_EQ(2u, observer_->NumberOfRemoteVideoTracks());
  observer_->VerifyRemoteVideoTrack(kStreams[0], kVideoTracks[0], 2);
  observer_->VerifyRemoteVideoTrack(kStreams[1], kVideoTracks[1], 4);
}

// This test verifies that the remote MediaStreams corresponding to a received
// SDP string are created. In this test the same remote MediaStream is
// signaled but MediaStream tracks are added and removed.
TEST_F(MediaStreamSignalingTest, AddRemoveTrackFromExistingRemoteMediaStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1;
  CreateSessionDescriptionAndReference(1, 1, desc_ms1.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms1.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));

  // Add extra audio and video tracks to the same MediaStream.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms1_two_tracks;
  CreateSessionDescriptionAndReference(2, 2, desc_ms1_two_tracks.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms1_two_tracks.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference_collection_));

  // Remove the extra audio and video tracks again.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_ms2;
  CreateSessionDescriptionAndReference(1, 1, desc_ms2.use());
  signaling_->OnRemoteDescriptionChanged(desc_ms2.get());
  EXPECT_TRUE(CompareStreamCollections(signaling_->remote_streams(),
                                       reference_collection_));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference_collection_));
}

// This test verifies that remote tracks are ended if a local session
// description is set that rejects the media content type.
TEST_F(MediaStreamSignalingTest, RejectMediaContent) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1, NULL));
  EXPECT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());

  talk_base::scoped_refptr<webrtc::VideoTrackInterface> remote_video =
      remote_stream->GetVideoTracks()[0];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_video->state());
  talk_base::scoped_refptr<webrtc::AudioTrackInterface> remote_audio =
      remote_stream->GetAudioTracks()[0];
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());

  cricket::ContentInfo* video_info =
      desc->description()->GetContentByName("video");
  ASSERT_TRUE(video_info != NULL);
  video_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_video->state());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kLive, remote_audio->state());

  cricket::ContentInfo* audio_info =
      desc->description()->GetContentByName("audio");
  ASSERT_TRUE(audio_info != NULL);
  audio_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(webrtc::MediaStreamTrackInterface::kEnded, remote_audio->state());
}

// This test verifies that it won't crash if a remote track has been removed
// outside of MediaStreamSignaling and then MediaStreamSignaling tries to
// reject this track.
TEST_F(MediaStreamSignalingTest, RemoveTrackThenRejectMediaContent) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1, NULL));
  EXPECT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);
  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);

  cricket::ContentInfo* video_info =
      desc->description()->GetContentByName("video");
  video_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());

  cricket::ContentInfo* audio_info =
      desc->description()->GetContentByName("audio");
  audio_info->rejected = true;
  signaling_->OnLocalDescriptionChanged(desc.get());

  // No crash is a pass.
}

// This tests that a default MediaStream is created if a remote session
// description doesn't contain any streams and MSID support is not indicated.
// It also tests that the default stream is updated if a video m-line is added
// in a subsequent session description.
TEST_F(MediaStreamSignalingTest, SdpWithoutMsidCreatesDefaultStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_audio_only(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreamsAudioOnly,
                                       NULL));
  ASSERT_TRUE(desc_audio_only != NULL);
  signaling_->OnRemoteDescriptionChanged(desc_audio_only.get());

  EXPECT_EQ(1u, signaling_->remote_streams()->count());
  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);

  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ(0u, remote_stream->GetVideoTracks().size());
  EXPECT_EQ("default", remote_stream->label());

  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams, NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());
  EXPECT_EQ(1u, signaling_->remote_streams()->count());
  ASSERT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ("defaulta0", remote_stream->GetAudioTracks()[0]->id());
  ASSERT_EQ(1u, remote_stream->GetVideoTracks().size());
  EXPECT_EQ("defaultv0", remote_stream->GetVideoTracks()[0]->id());
  observer_->VerifyRemoteAudioTrack("default", "defaulta0", 0);
  observer_->VerifyRemoteVideoTrack("default", "defaultv0", 0);
}

// This tests that it won't crash when MediaStreamSignaling tries to remove
// a remote track that has already been removed from the MediaStream.
TEST_F(MediaStreamSignalingTest, RemoveAlreadyGoneRemoteStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_without_streams(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams,
                                       NULL));
  ASSERT_TRUE(desc_without_streams != NULL);
  signaling_->OnRemoteDescriptionChanged(desc_without_streams.get());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  remote_stream->RemoveTrack(remote_stream->GetAudioTracks()[0]);
  remote_stream->RemoveTrack(remote_stream->GetVideoTracks()[0]);

  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams, NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  // No crash is a pass.
}

// This tests that a default MediaStream is created if the remote session
// description doesn't contain any streams and doesn't indicate whether MSID
// is supported.
TEST_F(MediaStreamSignalingTest,
       SdpWithoutMsidAndStreamsCreatesDefaultStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams,
                                       NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());

  ASSERT_EQ(1u, observer_->remote_streams()->count());
  MediaStreamInterface* remote_stream = observer_->remote_streams()->at(0);
  EXPECT_EQ(1u, remote_stream->GetAudioTracks().size());
  EXPECT_EQ(1u, remote_stream->GetVideoTracks().size());
}

// This tests that a default MediaStream is not created if the remote session
// description doesn't contain any streams but does support MSID.
TEST_F(MediaStreamSignalingTest, SdpWithMsidDoesNotCreateDefaultStream) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_msid_without_streams(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithMsidWithoutStreams,
                                       NULL));
  signaling_->OnRemoteDescriptionChanged(desc_msid_without_streams.get());
  EXPECT_EQ(0u, observer_->remote_streams()->count());
}

// This test verifies that a default MediaStream is not created if a remote
// session description is updated to not have any MediaStreams.
TEST_F(MediaStreamSignalingTest, VerifyDefaultStreamIsNotCreated) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithStream1,
                                       NULL));
  ASSERT_TRUE(desc != NULL);
  signaling_->OnRemoteDescriptionChanged(desc.get());
  talk_base::scoped_refptr<StreamCollection> reference(
      CreateStreamCollection(1));
  EXPECT_TRUE(CompareStreamCollections(observer_->remote_streams(),
                                       reference.get()));

  talk_base::scoped_ptr<SessionDescriptionInterface> desc_without_streams(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       kSdpStringWithoutStreams,
                                       NULL));
  signaling_->OnRemoteDescriptionChanged(desc_without_streams.get());
  EXPECT_EQ(0u, observer_->remote_streams()->count());
}

// This test verifies that the correct MediaStreamSignalingObserver methods
// are called when MediaStreamSignaling::OnLocalDescriptionChanged is called
// with an updated local session description.
TEST_F(MediaStreamSignalingTest, LocalDescriptionChanged) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_1;
  CreateSessionDescriptionAndReference(2, 2, desc_1.use());

  signaling_->AddLocalStream(reference_collection_->at(0));
  signaling_->OnLocalDescriptionChanged(desc_1.get());
  EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4);

  // Remove an audio and video track.
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_2;
  CreateSessionDescriptionAndReference(1, 1, desc_2.use());
  signaling_->OnLocalDescriptionChanged(desc_2.get());
  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
}

// This test verifies that the correct MediaStreamSignalingObserver methods
// are called when MediaStreamSignaling::AddLocalStream is called after
// MediaStreamSignaling::OnLocalDescriptionChanged is called.
TEST_F(MediaStreamSignalingTest, AddLocalStreamAfterLocalDescriptionChanged) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc_1;
  CreateSessionDescriptionAndReference(2, 2, desc_1.use());

  signaling_->OnLocalDescriptionChanged(desc_1.get());
  EXPECT_EQ(0u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(0u, observer_->NumberOfLocalVideoTracks());

  signaling_->AddLocalStream(reference_collection_->at(0));
  EXPECT_EQ(2u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(2u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[1], 3);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[1], 4);
}

// This test verifies that the correct MediaStreamSignalingObserver methods
// are called if the SSRC on a local track is changed when
// MediaStreamSignaling::OnLocalDescriptionChanged is called.
TEST_F(MediaStreamSignalingTest, ChangeSsrcOnTrackInLocalSessionDescription) {
  talk_base::scoped_ptr<SessionDescriptionInterface> desc;
  CreateSessionDescriptionAndReference(1, 1, desc.use());

  signaling_->AddLocalStream(reference_collection_->at(0));
  signaling_->OnLocalDescriptionChanged(desc.get());
  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 1);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 2);

  // Change the ssrc of the audio and video track.
  std::string sdp;
  desc->ToString(&sdp);
  std::string ssrc_org = "a=ssrc:1";
  std::string ssrc_to = "a=ssrc:97";
  talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(),
                             ssrc_to.c_str(), ssrc_to.length(),
                             &sdp);
  ssrc_org = "a=ssrc:2";
  ssrc_to = "a=ssrc:98";
  talk_base::replace_substrs(ssrc_org.c_str(), ssrc_org.length(),
                             ssrc_to.c_str(), ssrc_to.length(),
                             &sdp);
  talk_base::scoped_ptr<SessionDescriptionInterface> updated_desc(
      webrtc::CreateSessionDescription(SessionDescriptionInterface::kOffer,
                                       sdp, NULL));

  signaling_->OnLocalDescriptionChanged(updated_desc.get());
  EXPECT_EQ(1u, observer_->NumberOfLocalAudioTracks());
  EXPECT_EQ(1u, observer_->NumberOfLocalVideoTracks());
  observer_->VerifyLocalAudioTrack(kStreams[0], kAudioTracks[0], 97);
  observer_->VerifyLocalVideoTrack(kStreams[0], kVideoTracks[0], 98);
}

// Verifies that an even SCTP id is allocated for SSL_CLIENT and an odd id for
// SSL_SERVER.
TEST_F(MediaStreamSignalingTest, SctpIdAllocationBasedOnRole) {
  int id;
  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER, &id));
  EXPECT_EQ(1, id);
  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT, &id));
  EXPECT_EQ(0, id);
  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER, &id));
  EXPECT_EQ(3, id);
  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT, &id));
  EXPECT_EQ(2, id);
}

// Verifies that SCTP ids of existing DataChannels are not reused.
TEST_F(MediaStreamSignalingTest, SctpIdAllocationNoReuse) {
  int old_id = 1;
  AddDataChannel(cricket::DCT_SCTP, "a", old_id);

  int new_id;
  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_SERVER, &new_id));
  EXPECT_NE(old_id, new_id);

  // Creates a DataChannel with id 0.
  old_id = 0;
  AddDataChannel(cricket::DCT_SCTP, "a", old_id);
  ASSERT_TRUE(signaling_->AllocateSctpSid(talk_base::SSL_CLIENT, &new_id));
  EXPECT_NE(old_id, new_id);
}

// Verifies that a duplicated label is not allowed for an RTP data channel.
TEST_F(MediaStreamSignalingTest, RtpDuplicatedLabelNotAllowed) {
  AddDataChannel(cricket::DCT_RTP, "a", -1);

  webrtc::DataChannelInit config;
  talk_base::scoped_refptr<webrtc::DataChannel> data_channel =
      webrtc::DataChannel::Create(
          data_channel_provider_.get(), cricket::DCT_RTP, "a", &config);
  ASSERT_TRUE(data_channel.get() != NULL);
  EXPECT_FALSE(signaling_->AddDataChannel(data_channel.get()));
}

// Verifies that a duplicated label is allowed for an SCTP data channel.
TEST_F(MediaStreamSignalingTest, SctpDuplicatedLabelAllowed) {
  AddDataChannel(cricket::DCT_SCTP, "a", -1);
  AddDataChannel(cricket::DCT_SCTP, "a", -1);
}

// Verifies that a duplicated label from an OPEN message is allowed.
TEST_F(MediaStreamSignalingTest, DuplicatedLabelFromOpenMessageAllowed) {
  AddDataChannel(cricket::DCT_SCTP, "a", -1);

  FakeDataChannelFactory fake_factory(data_channel_provider_.get(),
                                      cricket::DCT_SCTP);
  signaling_->SetDataChannelFactory(&fake_factory);
  webrtc::DataChannelInit config;
  config.id = 0;
  EXPECT_TRUE(signaling_->AddDataChannelFromOpenMessage("a", config));
}