/*
 * libjingle
 * Copyright 2011 Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "talk/media/webrtc/webrtcvideoframe.h"

#include "libyuv/convert.h"
#include "libyuv/convert_from.h"
#include "libyuv/planar_functions.h"
#include "talk/base/logging.h"
#include "talk/media/base/videocapturer.h"
#include "talk/media/base/videocommon.h"

namespace cricket {

static const int kWatermarkWidth = 8;
static const int kWatermarkHeight = 8;
static const int kWatermarkOffsetFromLeft = 8;
static const int kWatermarkOffsetFromBottom = 8;
static const unsigned char kWatermarkMaxYValue = 64;

// Class that wraps ownership semantics of a buffer passed to it.
// * Buffers passed using Attach() become owned by this FrameBuffer and will be
//   destroyed on FrameBuffer destruction.
// * Buffers passed using Alias() are not owned and will not be destroyed on
//   FrameBuffer destruction. The buffer must then outlive the FrameBuffer.
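//
// A minimal usage sketch (illustrative only; |size| and |external_buffer| are
// placeholders, not names used elsewhere in this file):
//   FrameBuffer owned;
//   owned.Attach(new uint8[size], size);    // Freed when |owned| is destroyed.
//   FrameBuffer borrowed;
//   borrowed.Alias(external_buffer, size);  // Caller keeps ownership.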
class WebRtcVideoFrame::FrameBuffer {
 public:
  FrameBuffer();
  explicit FrameBuffer(size_t length);
  ~FrameBuffer();

  void Attach(uint8* data, size_t length);
  void Alias(uint8* data, size_t length);
  uint8* data();
  size_t length() const;

  webrtc::VideoFrame* frame();
  const webrtc::VideoFrame* frame() const;

 private:
  talk_base::scoped_ptr<uint8[]> owned_data_;
  webrtc::VideoFrame video_frame_;
};

WebRtcVideoFrame::FrameBuffer::FrameBuffer() {}

WebRtcVideoFrame::FrameBuffer::FrameBuffer(size_t length) {
  uint8* buffer = new uint8[length];
  Attach(buffer, length);
}

WebRtcVideoFrame::FrameBuffer::~FrameBuffer() {
  // Make sure that |video_frame_| doesn't delete the buffer, as |owned_data_|
  // will release the buffer if this FrameBuffer owns it.
  uint8_t* new_memory = NULL;
  uint32_t new_length = 0;
  uint32_t new_size = 0;
  video_frame_.Swap(new_memory, new_length, new_size);
}

void WebRtcVideoFrame::FrameBuffer::Attach(uint8* data, size_t length) {
  Alias(data, length);
  owned_data_.reset(data);
}

void WebRtcVideoFrame::FrameBuffer::Alias(uint8* data, size_t length) {
  owned_data_.reset();
  uint8_t* new_memory = reinterpret_cast<uint8_t*>(data);
  uint32_t new_length = static_cast<uint32_t>(length);
  uint32_t new_size = static_cast<uint32_t>(length);
  video_frame_.Swap(new_memory, new_length, new_size);
}

uint8* WebRtcVideoFrame::FrameBuffer::data() {
  return video_frame_.Buffer();
}

size_t WebRtcVideoFrame::FrameBuffer::length() const {
  return video_frame_.Length();
}

webrtc::VideoFrame* WebRtcVideoFrame::FrameBuffer::frame() {
  return &video_frame_;
}

const webrtc::VideoFrame* WebRtcVideoFrame::FrameBuffer::frame() const {
  return &video_frame_;
}

WebRtcVideoFrame::WebRtcVideoFrame()
    : video_buffer_(new RefCountedBuffer()), is_black_(false) {}

WebRtcVideoFrame::~WebRtcVideoFrame() {}

bool WebRtcVideoFrame::Init(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
               pixel_height, elapsed_time, time_stamp, rotation);
}

bool WebRtcVideoFrame::Init(const CapturedFrame* frame, int dw, int dh) {
  return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
               static_cast<uint8*>(frame->data), frame->data_size,
               frame->pixel_width, frame->pixel_height, frame->elapsed_time,
               frame->time_stamp, frame->rotation);
}

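// Aliases the captured frame's buffer when no conversion is required, i.e. the
// frame is already I420, is not rotated, and matches the desired dimensions.
// Otherwise falls back to Init(), which copies and converts the sample.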
bool WebRtcVideoFrame::Alias(const CapturedFrame* frame, int dw, int dh) {
  if (CanonicalFourCC(frame->fourcc) != FOURCC_I420 || frame->rotation != 0 ||
      frame->width != dw || frame->height != dh) {
    // TODO(fbarchard): Enable aliasing of more formats.
    return Init(frame, dw, dh);
  } else {
    Alias(static_cast<uint8*>(frame->data),
          frame->data_size,
          frame->width,
          frame->height,
          frame->pixel_width,
          frame->pixel_height,
          frame->elapsed_time,
          frame->time_stamp,
          frame->rotation);
    return true;
  }
}

bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
                                   size_t pixel_height, int64 elapsed_time,
                                   int64 time_stamp) {
  InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time, time_stamp);
  if (!is_black_) {
    return SetToBlack();
  }
  return true;
}

void WebRtcVideoFrame::Alias(
    uint8* buffer, size_t buffer_size, int w, int h, size_t pixel_width,
    size_t pixel_height, int64 elapsed_time, int64 time_stamp, int rotation) {
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer());
  video_buffer->Alias(buffer, buffer_size);
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, rotation);
}

size_t WebRtcVideoFrame::GetWidth() const { return frame()->Width(); }

size_t WebRtcVideoFrame::GetHeight() const { return frame()->Height(); }

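// The underlying buffer holds a contiguous I420 image: the Y plane
// (width * height bytes) is followed by the U plane and then the V plane,
// each GetChromaSize() bytes, which is what the offsets below rely on.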
const uint8* WebRtcVideoFrame::GetYPlane() const {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

const uint8* WebRtcVideoFrame::GetUPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

const uint8* WebRtcVideoFrame::GetVPlane() const {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetYPlane() {
  uint8_t* buffer = frame()->Buffer();
  return buffer;
}

uint8* WebRtcVideoFrame::GetUPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    buffer += (frame()->Width() * frame()->Height());
  }
  return buffer;
}

uint8* WebRtcVideoFrame::GetVPlane() {
  uint8_t* buffer = frame()->Buffer();
  if (buffer) {
    int uv_size = static_cast<int>(GetChromaSize());
    buffer += frame()->Width() * frame()->Height() + uv_size;
  }
  return buffer;
}

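// Returns a shallow copy that shares the same ref-counted buffer; callers that
// need an independent copy of the pixel data can call MakeExclusive() on it.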
VideoFrame* WebRtcVideoFrame::Copy() const {
  uint8* old_buffer = video_buffer_->data();
  if (!old_buffer)
    return NULL;
  size_t new_buffer_size = video_buffer_->length();

  WebRtcVideoFrame* ret_val = new WebRtcVideoFrame();
  ret_val->Attach(video_buffer_.get(), new_buffer_size, frame()->Width(),
                  frame()->Height(), pixel_width_, pixel_height_, elapsed_time_,
                  time_stamp_, rotation_);
  return ret_val;
}

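// Replaces the shared buffer with a newly allocated private copy of the pixel
// data, so later writes through this frame cannot affect other frames that
// still reference the old buffer.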
bool WebRtcVideoFrame::MakeExclusive() {
  const size_t length = video_buffer_->length();
  RefCountedBuffer* exclusive_buffer = new RefCountedBuffer(length);
  memcpy(exclusive_buffer->data(), video_buffer_->data(), length);
  Attach(exclusive_buffer, length, frame()->Width(), frame()->Height(),
         pixel_width_, pixel_height_, elapsed_time_, time_stamp_, rotation_);
  return true;
}

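// Copies the frame into |buffer| only if |size| is large enough; in either
// case the return value is the number of bytes required.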
size_t WebRtcVideoFrame::CopyToBuffer(uint8* buffer, size_t size) const {
  if (!frame()->Buffer()) {
    return 0;
  }

  size_t needed = frame()->Length();
  if (needed <= size) {
    memcpy(buffer, frame()->Buffer(), needed);
  }
  return needed;
}

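// Converts the frame to the RGB format named by |to_fourcc| and writes it into
// |buffer| with the given stride. Returns the number of bytes required, or 0
// if the conversion itself fails.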
// TODO(fbarchard): Refactor into base class and share with lmi
size_t WebRtcVideoFrame::ConvertToRgbBuffer(uint32 to_fourcc, uint8* buffer,
                                            size_t size, int stride_rgb) const {
  if (!frame()->Buffer()) {
    return 0;
  }
  size_t width = frame()->Width();
  size_t height = frame()->Height();
  size_t needed = (stride_rgb >= 0 ? stride_rgb : -stride_rgb) * height;
  if (size < needed) {
    LOG(LS_WARNING) << "RGB buffer is not large enough";
    return needed;
  }

  if (libyuv::ConvertFromI420(GetYPlane(), GetYPitch(), GetUPlane(),
                              GetUPitch(), GetVPlane(), GetVPitch(), buffer,
                              stride_rgb,
                              static_cast<int>(width),
                              static_cast<int>(height),
                              to_fourcc)) {
    LOG(LS_WARNING) << "RGB type not supported: " << to_fourcc;
    return 0;  // 0 indicates error
  }
  return needed;
}

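// Takes a reference to |video_buffer| (no pixel data is copied) and updates
// the frame's dimensions and metadata to the supplied values.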
void WebRtcVideoFrame::Attach(
    RefCountedBuffer* video_buffer, size_t buffer_size, int w, int h,
    size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp, int rotation) {
  if (video_buffer_.get() == video_buffer) {
    return;
  }
  is_black_ = false;
  video_buffer_ = video_buffer;
  frame()->SetWidth(w);
  frame()->SetHeight(h);
  pixel_width_ = pixel_width;
  pixel_height_ = pixel_height;
  elapsed_time_ = elapsed_time;
  time_stamp_ = time_stamp;
  rotation_ = rotation;
}

// Adds a square watermark near the lower-left corner by clamping the Y values.
// Returns false on error.
bool WebRtcVideoFrame::AddWatermark() {
  size_t w = GetWidth();
  size_t h = GetHeight();

  if (w < kWatermarkWidth + kWatermarkOffsetFromLeft ||
      h < kWatermarkHeight + kWatermarkOffsetFromBottom) {
    return false;
  }

  uint8* buffer = GetYPlane();
  for (size_t x = kWatermarkOffsetFromLeft;
       x < kWatermarkOffsetFromLeft + kWatermarkWidth; ++x) {
    for (size_t y = h - kWatermarkOffsetFromBottom - kWatermarkHeight;
         y < h - kWatermarkOffsetFromBottom; ++y) {
      buffer[y * w + x] =
          talk_base::_min(buffer[y * w + x], kWatermarkMaxYValue);
    }
  }
  return true;
}

webrtc::VideoFrame* WebRtcVideoFrame::frame() {
  return video_buffer_->frame();
}

const webrtc::VideoFrame* WebRtcVideoFrame::frame() const {
  return video_buffer_->frame();
}

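// Allocates a fresh I420 buffer sized for the (possibly rotation-swapped)
// display dimensions, then converts, crops, and rotates |sample| into it via
// libyuv::ConvertToI420.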
bool WebRtcVideoFrame::Reset(
    uint32 format, int w, int h, int dw, int dh, uint8* sample,
    size_t sample_size, size_t pixel_width, size_t pixel_height,
    int64 elapsed_time, int64 time_stamp, int rotation) {
  if (!Validate(format, w, h, sample, sample_size)) {
    return false;
  }
  // Translate aliases to standard enums (e.g., IYUV -> I420).
  format = CanonicalFourCC(format);

  // Round display width and height down to a multiple of 4, to avoid a webrtc
  // size calculation error on odd sizes.
  // TODO(Ronghua): Remove this once the webrtc allocator is fixed.
  dw = (dw > 4) ? (dw & ~3) : dw;
  dh = (dh > 4) ? (dh & ~3) : dh;

  // Set up a new buffer.
  // TODO(fbarchard): Support lazy allocation.
  int new_width = dw;
  int new_height = dh;
  if (rotation == 90 || rotation == 270) {  // If rotated, swap width and height.
    new_width = dh;
    new_height = dw;
  }

  size_t desired_size = SizeOf(new_width, new_height);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(desired_size));
  // libyuv::ConvertToI420 handles the rotation, so the new frame's rotation
  // should always be 0.
  Attach(video_buffer.get(), desired_size, new_width, new_height, pixel_width,
         pixel_height, elapsed_time, time_stamp, 0);

  int horiz_crop = ((w - dw) / 2) & ~1;
  // ARGB on Windows has negative height.
  // The sample's layout in memory is normal, so just correct crop.
  int vert_crop = ((abs(h) - dh) / 2) & ~1;
  // Conversion functions expect negative height to flip the image.
  int idh = (h < 0) ? -dh : dh;
  uint8* y = GetYPlane();
  int y_stride = GetYPitch();
  uint8* u = GetUPlane();
  int u_stride = GetUPitch();
  uint8* v = GetVPlane();
  int v_stride = GetVPitch();
  int r = libyuv::ConvertToI420(
      sample, sample_size, y, y_stride, u, u_stride, v, v_stride, horiz_crop,
      vert_crop, w, h, dw, idh, static_cast<libyuv::RotationMode>(rotation),
      format);
  if (r) {
    LOG(LS_ERROR) << "Error parsing format: " << GetFourccName(format)
                  << " return code : " << r;
    return false;
  }
  return true;
}

VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
    int w, int h, size_t pixel_width, size_t pixel_height, int64 elapsed_time,
    int64 time_stamp) const {
  WebRtcVideoFrame* frame = new WebRtcVideoFrame();
  frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time,
                           time_stamp);
  return frame;
}

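// Allocates an uninitialized I420 buffer of SizeOf(w, h) bytes and attaches it
// to this frame without writing any pixel data.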
void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
                                         size_t pixel_height,
                                         int64 elapsed_time, int64 time_stamp) {
  size_t buffer_size = VideoFrame::SizeOf(w, h);
  talk_base::scoped_refptr<RefCountedBuffer> video_buffer(
      new RefCountedBuffer(buffer_size));
  Attach(video_buffer.get(), buffer_size, w, h, pixel_width, pixel_height,
         elapsed_time, time_stamp, 0);
}

}  // namespace cricket