/*
 * libjingle
 * Copyright 2004 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef TALK_MEDIA_BASE_HYBRIDVIDEOENGINE_H_
#define TALK_MEDIA_BASE_HYBRIDVIDEOENGINE_H_

#include <string>
#include <vector>

#include "talk/base/logging.h"
#include "talk/base/scoped_ptr.h"
#include "talk/base/sigslotrepeater.h"
#include "talk/media/base/codec.h"
#include "talk/media/base/mediachannel.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"

namespace cricket {

struct Device;
struct VideoFormat;
class HybridVideoEngineInterface;
class VideoCapturer;
class VideoFrame;
class VideoRenderer;

// HybridVideoMediaChannels work with a HybridVideoEngine to combine
// two unrelated VideoMediaChannel implementations into a single class.
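//
// A brief usage sketch (the engine object, voice channel, and codec list
// below are illustrative assumptions, not names defined in this header):
//
//   HybridVideoMediaChannel* channel =
//       hybrid_engine->CreateChannel(voice_channel);
//   // During call setup, the codecs supplied here determine which of the
//   // two wrapped channels becomes active; subsequent calls are forwarded
//   // to that channel.
//   channel->SetRecvCodecs(remote_codecs);
//   channel->SetSend(true);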
class HybridVideoMediaChannel : public VideoMediaChannel {
 public:
  HybridVideoMediaChannel(HybridVideoEngineInterface* engine,
                          VideoMediaChannel* channel1,
                          VideoMediaChannel* channel2);
  virtual ~HybridVideoMediaChannel();

  // VideoMediaChannel methods
  virtual void SetInterface(NetworkInterface* iface);
  virtual bool SetOptions(const VideoOptions& options);
  virtual bool GetOptions(VideoOptions* options) const;
  virtual bool AddSendStream(const StreamParams& sp);
  virtual bool RemoveSendStream(uint32 ssrc);
  virtual bool SetRenderer(uint32 ssrc, VideoRenderer* renderer);
  virtual bool SetRender(bool render);
  virtual bool MuteStream(uint32 ssrc, bool muted);

  virtual bool SetRecvCodecs(const std::vector<VideoCodec>& codecs);
  virtual bool SetRecvRtpHeaderExtensions(
      const std::vector<RtpHeaderExtension>& extensions);

  virtual bool SetSendCodecs(const std::vector<VideoCodec>& codecs);
  virtual bool GetSendCodec(VideoCodec* codec);
  virtual bool SetSendStreamFormat(uint32 ssrc, const VideoFormat& format);
  virtual bool SetSendRtpHeaderExtensions(
      const std::vector<RtpHeaderExtension>& extensions);
  virtual bool SetSendBandwidth(bool autobw, int bps);
  virtual bool SetSend(bool send);

  virtual bool AddRecvStream(const StreamParams& sp);
  virtual bool RemoveRecvStream(uint32 ssrc);
  virtual bool SetCapturer(uint32 ssrc, VideoCapturer* capturer);

  virtual bool SendIntraFrame();
  virtual bool RequestIntraFrame();

  virtual bool GetStats(VideoMediaInfo* info);

  virtual void OnPacketReceived(talk_base::Buffer* packet,
                                const talk_base::PacketTime& packet_time);
  virtual void OnRtcpReceived(talk_base::Buffer* packet,
                              const talk_base::PacketTime& packet_time);
  virtual void OnReadyToSend(bool ready);

  virtual void UpdateAspectRatio(int ratio_w, int ratio_h);

  void OnLocalFrame(VideoCapturer*, const VideoFrame*);
  void OnLocalFrameFormat(VideoCapturer*, const VideoFormat*);

  bool sending() const { return sending_; }
 private:
  // Picks which of the two wrapped channels becomes the active channel,
  // based on which codecs each underlying engine supports.
  bool SelectActiveChannel(const std::vector<VideoCodec>& codecs);
  // Splits |codecs| between the two channels according to which engine
  // supports each codec.
  void SplitCodecs(const std::vector<VideoCodec>& codecs,
                   std::vector<VideoCodec>* codecs1,
                   std::vector<VideoCodec>* codecs2);

  void OnMediaError(uint32 ssrc, Error error);

  HybridVideoEngineInterface* engine_;
  talk_base::scoped_ptr<VideoMediaChannel> channel1_;
  talk_base::scoped_ptr<VideoMediaChannel> channel2_;
  VideoMediaChannel* active_channel_;
  bool sending_;
};

// Interface class for HybridVideoMediaChannels to talk to the engine.
class HybridVideoEngineInterface {
 public:
  virtual ~HybridVideoEngineInterface() {}
  virtual bool HasCodec1(const VideoCodec& codec) = 0;
  virtual bool HasCodec2(const VideoCodec& codec) = 0;
  virtual void OnSendChange1(VideoMediaChannel* channel1, bool send) = 0;
  virtual void OnSendChange2(VideoMediaChannel* channel2, bool send) = 0;
  virtual void OnNewSendResolution(int width, int height) = 0;
};

// The HybridVideoEngine class combines two unrelated VideoEngine impls
// into a single class. It creates HybridVideoMediaChannels that also contain
// a VideoMediaChannel implementation from each engine. Policy is then used
// during call setup to determine which VideoMediaChannel should be used.
// Currently, this policy is based on what codec the remote side wants to use.
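//
// A minimal instantiation sketch (VideoEngineA, VideoEngineB, worker_thread,
// and voice_channel are placeholders, not names defined in this header):
//
//   typedef HybridVideoEngine<VideoEngineA, VideoEngineB> MyHybridVideoEngine;
//   MyHybridVideoEngine engine;
//   engine.Init(worker_thread);
//   HybridVideoMediaChannel* channel = engine.CreateChannel(voice_channel);
//   ...
//   engine.Terminate();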
template<class VIDEO1, class VIDEO2>
class HybridVideoEngine : public HybridVideoEngineInterface {
 public:
  HybridVideoEngine() {
    // Unify the codec lists.
    codecs_ = video1_.codecs();
    codecs_.insert(codecs_.end(), video2_.codecs().begin(),
                   video2_.codecs().end());

    rtp_header_extensions_ = video1_.rtp_header_extensions();
    rtp_header_extensions_.insert(rtp_header_extensions_.end(),
                                  video2_.rtp_header_extensions().begin(),
                                  video2_.rtp_header_extensions().end());

    // Only the second engine's capture-state signal is repeated, because
    // capture is delegated to the second engine (see the capture methods
    // and the TODO below).
    SignalCaptureStateChange.repeat(video2_.SignalCaptureStateChange);
  }

  bool Init(talk_base::Thread* worker_thread) {
    if (!video1_.Init(worker_thread)) {
      LOG(LS_ERROR) << "Failed to init VideoEngine1";
      return false;
    }
    if (!video2_.Init(worker_thread)) {
      LOG(LS_ERROR) << "Failed to init VideoEngine2";
      video1_.Terminate();
      return false;
    }
    return true;
  }
  void Terminate() {
    video1_.Terminate();
    video2_.Terminate();
  }

  int GetCapabilities() {
    return (video1_.GetCapabilities() | video2_.GetCapabilities());
  }
  HybridVideoMediaChannel* CreateChannel(VoiceMediaChannel* channel) {
    talk_base::scoped_ptr<VideoMediaChannel> channel1(
        video1_.CreateChannel(channel));
    if (!channel1) {
      LOG(LS_ERROR) << "Failed to create VideoMediaChannel1";
      return NULL;
    }
    talk_base::scoped_ptr<VideoMediaChannel> channel2(
        video2_.CreateChannel(channel));
    if (!channel2) {
      LOG(LS_ERROR) << "Failed to create VideoMediaChannel2";
      return NULL;
    }
    return new HybridVideoMediaChannel(this,
        channel1.release(), channel2.release());
  }

  bool SetOptions(const VideoOptions& options) {
    return video1_.SetOptions(options) && video2_.SetOptions(options);
  }
  bool SetDefaultEncoderConfig(const VideoEncoderConfig& config) {
    VideoEncoderConfig conf = config;
    if (video1_.codecs().size() > 0) {
      conf.max_codec.name = video1_.codecs()[0].name;
      if (!video1_.SetDefaultEncoderConfig(conf)) {
        LOG(LS_ERROR) << "Failed to SetDefaultEncoderConfig for video1";
        return false;
      }
    }
    if (video2_.codecs().size() > 0) {
      conf.max_codec.name = video2_.codecs()[0].name;
      if (!video2_.SetDefaultEncoderConfig(conf)) {
        LOG(LS_ERROR) << "Failed to SetDefaultEncoderConfig for video2";
        return false;
      }
    }
    return true;
  }
  VideoEncoderConfig GetDefaultEncoderConfig() const {
    // This looks strange, but in practice it does the right thing as long as
    // GetDefaultEncoderConfig is only called after SetDefaultEncoderConfig,
    // since both engines should be essentially equivalent at that point. If it
    // hasn't been called, though, we'll use the first meaningful encoder
    // config, or the config from the second video engine if neither is
    // meaningful.
    VideoEncoderConfig config = video1_.GetDefaultEncoderConfig();
    if (config.max_codec.width != 0) {
      return config;
    } else {
      return video2_.GetDefaultEncoderConfig();
    }
  }
  const std::vector<VideoCodec>& codecs() const {
    return codecs_;
  }
  const std::vector<RtpHeaderExtension>& rtp_header_extensions() const {
    return rtp_header_extensions_;
  }
  void SetLogging(int min_sev, const char* filter) {
    video1_.SetLogging(min_sev, filter);
    video2_.SetLogging(min_sev, filter);
  }

  VideoFormat GetStartCaptureFormat() const {
    return video2_.GetStartCaptureFormat();
  }

  // TODO(juberti): Remove these functions after we do the capturer refactoring.
  // For now they are set to always use the second engine for capturing, which
  // is convenient given our intended use case.
  bool SetCaptureDevice(const Device* device) {
    return video2_.SetCaptureDevice(device);
  }
  VideoCapturer* GetVideoCapturer() const {
    return video2_.GetVideoCapturer();
  }
  bool SetLocalRenderer(VideoRenderer* renderer) {
    return video2_.SetLocalRenderer(renderer);
  }
  sigslot::repeater2<VideoCapturer*, CaptureState> SignalCaptureStateChange;

  virtual bool HasCodec1(const VideoCodec& codec) {
    return HasCodec(video1_, codec);
  }
  virtual bool HasCodec2(const VideoCodec& codec) {
    return HasCodec(video2_, codec);
  }
  template<typename VIDEO>
  bool HasCodec(const VIDEO& engine, const VideoCodec& codec) const {
    for (std::vector<VideoCodec>::const_iterator i = engine.codecs().begin();
         i != engine.codecs().end();
         ++i) {
      if (i->Matches(codec)) {
        return true;
      }
    }
    return false;
  }
  // Default, no-op implementations of the HybridVideoEngineInterface
  // callbacks; concrete hybrid engines may override them.
  virtual void OnSendChange1(VideoMediaChannel* channel1, bool send) {
  }
  virtual void OnSendChange2(VideoMediaChannel* channel2, bool send) {
  }
  virtual void OnNewSendResolution(int width, int height) {
  }
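
  // A concrete hybrid engine (the subclass below is purely illustrative,
  // not defined anywhere in this codebase) can override these hooks, e.g.
  // to observe when either wrapped channel starts or stops sending:
  //
  //   class MyHybridVideoEngine
  //       : public HybridVideoEngine<VideoEngineA, VideoEngineB> {
  //     virtual void OnSendChange2(VideoMediaChannel* channel2, bool send) {
  //       // React to the second channel starting or stopping to send.
  //     }
  //   };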

 protected:
  VIDEO1 video1_;
  VIDEO2 video2_;
  std::vector<VideoCodec> codecs_;
  std::vector<RtpHeaderExtension> rtp_header_extensions_;
};

}  // namespace cricket

#endif  // TALK_MEDIA_BASE_HYBRIDVIDEOENGINE_H_