video_frame.cc revision 0f1bc08d4cfcc34181b0b5cbf065c40f687bf740
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/video_frame.h"

#include <algorithm>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/aligned_memory.h"
#include "base/strings/string_piece.h"
#include "media/base/limits.h"
#include "media/base/video_util.h"
#include "third_party/skia/include/core/SkBitmap.h"

namespace media {

// Allocates a new frame that owns its pixel memory. Only packed RGB32 and
// the planar YUV formats listed below are supported; any other format is a
// fatal error. The config is DCHECKed with IsValidConfig() first.
// static
scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
    VideoFrame::Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp) {
  DCHECK(IsValidConfig(format, coded_size, visible_rect, natural_size));
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      format, coded_size, visible_rect, natural_size, timestamp));
  switch (format) {
    case VideoFrame::RGB32:
      frame->AllocateRGB(4u);  // 4 bytes per pixel.
      break;
    case VideoFrame::YV12:
    case VideoFrame::YV12A:
    case VideoFrame::YV16:
    case VideoFrame::I420:
      frame->AllocateYUV();
      break;
    default:
      LOG(FATAL) << "Unsupported frame format: " << format;
  }
  return frame;
}

// Returns a human-readable name for |format|, for logging/debugging.
// static
std::string VideoFrame::FormatToString(VideoFrame::Format format) {
  switch (format) {
    case VideoFrame::UNKNOWN:
      return "UNKNOWN";
    case VideoFrame::RGB32:
      return "RGB32";
    case VideoFrame::YV12:
      return "YV12";
    case VideoFrame::YV16:
      return "YV16";
    case VideoFrame::EMPTY:
      return "EMPTY";
    case VideoFrame::I420:
      return "I420";
    case VideoFrame::NATIVE_TEXTURE:
      return "NATIVE_TEXTURE";
#if defined(GOOGLE_TV)
    case VideoFrame::HOLE:
      return "HOLE";
#endif
    case VideoFrame::YV12A:
      return "YV12A";
  }
  NOTREACHED() << "Invalid videoframe format provided: " << format;
  return "";
}

// Sanity-checks a frame geometry: format must be known, all sizes must be
// non-empty and within limits::kMaxCanvas / limits::kMaxDimension, and the
// visible rect must lie entirely inside the coded size.
// static
bool VideoFrame::IsValidConfig(VideoFrame::Format format,
                               const gfx::Size& coded_size,
                               const gfx::Rect& visible_rect,
                               const gfx::Size& natural_size) {
  return (format != VideoFrame::UNKNOWN &&
          !coded_size.IsEmpty() &&
          coded_size.GetArea() <= limits::kMaxCanvas &&
          coded_size.width() <= limits::kMaxDimension &&
          coded_size.height() <= limits::kMaxDimension &&
          !visible_rect.IsEmpty() &&
          visible_rect.x() >= 0 && visible_rect.y() >= 0 &&
          visible_rect.right() <= coded_size.width() &&
          visible_rect.bottom() <= coded_size.height() &&
          !natural_size.IsEmpty() &&
          natural_size.GetArea() <= limits::kMaxCanvas &&
          natural_size.width() <= limits::kMaxDimension &&
          natural_size.height() <= limits::kMaxDimension);
}

// Wraps a GPU texture (identified by a mailbox) as a NATIVE_TEXTURE frame.
// The frame does not own pixel memory; |no_longer_needed_cb| fires from the
// destructor, and |read_pixels_cb| (may be null) services readbacks.
// static
scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
    const scoped_refptr<MailboxHolder>& mailbox_holder,
    uint32 texture_target,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp,
    const ReadPixelsCB& read_pixels_cb,
    const base::Closure& no_longer_needed_cb) {
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      NATIVE_TEXTURE, coded_size, visible_rect, natural_size, timestamp));
  frame->texture_mailbox_holder_ = mailbox_holder;
  frame->texture_target_ = texture_target;
  frame->read_pixels_cb_ = read_pixels_cb;
  frame->no_longer_needed_cb_ = no_longer_needed_cb;

  return frame;
}

// Synchronously reads the texture contents into |pixels| via the callback
// supplied at wrap time. No-op if no readback callback was provided.
void VideoFrame::ReadPixelsFromNativeTexture(const SkBitmap& pixels) {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  if (!read_pixels_cb_.is_null())
    read_pixels_cb_.Run(pixels);
}

// Wraps caller-owned shared memory as an I420 frame (the only format
// implemented). Returns NULL if |data_size| is too small for the coded size.
// Plane pointers are derived from |data| assuming tightly-packed I420 layout.
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    uint8* data,
    size_t data_size,
    base::SharedMemoryHandle handle,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  if (data_size < AllocationSize(format, coded_size))
    return NULL;

  switch (format) {
    case I420: {
      scoped_refptr<VideoFrame> frame(new VideoFrame(
          format, coded_size, visible_rect, natural_size, timestamp));
      frame->shared_memory_handle_ = handle;
      frame->strides_[kYPlane] = coded_size.width();
      frame->strides_[kUPlane] = coded_size.width() / 2;
      frame->strides_[kVPlane] = coded_size.width() / 2;
      frame->data_[kYPlane] = data;
      // U follows the full-resolution Y plane; V follows the quarter-size U
      // plane (hence the 5/4 factor).
      frame->data_[kUPlane] = data + coded_size.GetArea();
      frame->data_[kVPlane] = data + (coded_size.GetArea() * 5 / 4);
      frame->no_longer_needed_cb_ = no_longer_needed_cb;
      return frame;
    }
    default:
      NOTIMPLEMENTED();
      return NULL;
  }
}

// Wraps caller-owned YUV planes (with explicit per-plane strides) without
// copying. The caller keeps ownership; |no_longer_needed_cb| fires when the
// frame is destroyed.
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    int32 y_stride,
    int32 u_stride,
    int32 v_stride,
    uint8* y_data,
    uint8* u_data,
    uint8* v_data,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  DCHECK(format == YV12 || format == YV16 || format == I420) << format;
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      format, coded_size, visible_rect, natural_size, timestamp));
  frame->strides_[kYPlane] = y_stride;
  frame->strides_[kUPlane] = u_stride;
  frame->strides_[kVPlane] = v_stride;
  frame->data_[kYPlane] = y_data;
  frame->data_[kUPlane] = u_data;
  frame->data_[kVPlane] = v_data;
  frame->no_longer_needed_cb_ = no_longer_needed_cb;
  return frame;
}

// Creates the zero-sized EMPTY frame used as the end-of-stream marker
// (see IsEndOfStream()).
// static
scoped_refptr<VideoFrame> VideoFrame::CreateEmptyFrame() {
  return new VideoFrame(
      VideoFrame::EMPTY, gfx::Size(), gfx::Rect(), gfx::Size(),
      base::TimeDelta());
}

// Allocates a YV12 frame filled with the solid color (y, u, v).
// static
scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
    const gfx::Size& size,
    uint8 y, uint8 u, uint8 v,
    base::TimeDelta timestamp) {
  DCHECK(IsValidConfig(VideoFrame::YV12, size, gfx::Rect(size), size));
  scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
      VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
  FillYUV(frame.get(), y, u, v);
  return frame;
}

// Allocates a YV12 frame filled with black (Y=0x00, U=V=0x80) at timestamp 0.
// static
scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) {
  const uint8 kBlackY = 0x00;
  const uint8 kBlackUV = 0x80;
  const base::TimeDelta kZero;
  return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV, kZero);
}

#if defined(GOOGLE_TV)
// This block and other blocks wrapped around #if defined(GOOGLE_TV) is not
// maintained by the general compositor team. Please contact the following
// people instead:
//
// wonsik@chromium.org
// ycheo@chromium.org

// Creates a data-less HOLE frame (punch-through for platform video planes).
// static
scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
    const gfx::Size& size) {
  DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size));
  scoped_refptr<VideoFrame> frame(new VideoFrame(
      VideoFrame::HOLE, size, gfx::Rect(size), size, base::TimeDelta()));
  return frame;
}
#endif

// Returns the number of addressable planes for |format|: 0 for data-less
// formats, 1 for packed RGB, 3 for planar YUV, 4 for YUV+alpha.
// static
size_t VideoFrame::NumPlanes(Format format) {
  switch (format) {
    case VideoFrame::NATIVE_TEXTURE:
#if defined(GOOGLE_TV)
    case VideoFrame::HOLE:
#endif
      return 0;
    case VideoFrame::RGB32:
      return 1;
    case VideoFrame::YV12:
    case VideoFrame::YV16:
    case VideoFrame::I420:
      return 3;
    case VideoFrame::YV12A:
      return 4;
    case VideoFrame::EMPTY:
    case VideoFrame::UNKNOWN:
      break;
  }
  NOTREACHED() << "Unsupported video frame format: " << format;
  return 0;
}

// Rounds |value| up to the nearest multiple of |alignment| (which must be a
// power of two).
static inline size_t RoundUp(size_t value, size_t alignment) {
  // Check that |alignment| is a power of 2.
  // Uses the identity a + b == (a | b) + (a & b): the two sides are equal
  // exactly when alignment & (alignment - 1) == 0.
  DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
  return ((value + (alignment - 1)) & ~(alignment-1));
}

// Returns the minimum number of bytes needed to hold a frame of |format| at
// |coded_size| (width/height rounded up to even for the subsampled formats).
// static
size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) {
  switch (format) {
    case VideoFrame::RGB32:
      return coded_size.GetArea() * 4;
    case VideoFrame::YV12:
    case VideoFrame::I420: {
      const size_t rounded_size =
          RoundUp(coded_size.width(), 2) * RoundUp(coded_size.height(), 2);
      return rounded_size * 3 / 2;  // Y + quarter-size U + quarter-size V.
    }
    case VideoFrame::YV12A: {
      const size_t rounded_size =
          RoundUp(coded_size.width(), 2) * RoundUp(coded_size.height(), 2);
      return rounded_size * 5 / 2;  // YV12 plus a full-size alpha plane.
    }
    case VideoFrame::YV16: {
      const size_t rounded_size =
          RoundUp(coded_size.width(), 2) * RoundUp(coded_size.height(), 2);
      return rounded_size * 2;  // Y + half-size U + half-size V.
    }
    case VideoFrame::UNKNOWN:
    case VideoFrame::EMPTY:
    case VideoFrame::NATIVE_TEXTURE:
#if defined(GOOGLE_TV)
    case VideoFrame::HOLE:
#endif
      break;
  }
  NOTREACHED() << "Unsupported video frame format: " << format;
  return 0;
}

// Release data allocated by AllocateRGB() or AllocateYUV().
static void ReleaseData(uint8* data) {
  DCHECK(data);
  base::AlignedFree(data);
}

// Allocates a single packed RGB plane; freed via no_longer_needed_cb_.
void VideoFrame::AllocateRGB(size_t bytes_per_pixel) {
  // Round up to align at least at a 16-byte boundary for each row.
  // This is sufficient for MMX and SSE2 reads (movq/movdqa).
  size_t bytes_per_row = RoundUp(coded_size_.width(),
                                 kFrameSizeAlignment) * bytes_per_pixel;
  size_t aligned_height = RoundUp(coded_size_.height(), kFrameSizeAlignment);
  strides_[VideoFrame::kRGBPlane] = bytes_per_row;
  data_[VideoFrame::kRGBPlane] = reinterpret_cast<uint8*>(
      base::AlignedAlloc(bytes_per_row * aligned_height + kFrameSizePadding,
                         kFrameAddressAlignment));
  no_longer_needed_cb_ = base::Bind(&ReleaseData, data_[VideoFrame::kRGBPlane]);
  DCHECK(!(reinterpret_cast<intptr_t>(data_[VideoFrame::kRGBPlane]) & 7));
  COMPILE_ASSERT(0 == VideoFrame::kRGBPlane, RGB_data_must_be_index_0);
}

// Allocates all planes for the planar YUV formats in one aligned buffer;
// freed via no_longer_needed_cb_.
void VideoFrame::AllocateYUV() {
  DCHECK(format_ == VideoFrame::YV12 || format_ == VideoFrame::YV16 ||
         format_ == VideoFrame::YV12A || format_ == VideoFrame::I420);
  // Align Y rows at least at 16 byte boundaries. The stride for both
  // YV12 and YV16 is 1/2 of the stride of Y. For YV12, every row of bytes for
  // U and V applies to two rows of Y (one byte of UV for 4 bytes of Y), so in
  // the case of YV12 the strides are identical for the same width surface, but
  // the number of bytes allocated for YV12 is 1/2 the amount for U & V as
  // YV16. We also round the height of the surface allocated to be an even
  // number to avoid any potential of faulting by code that attempts to access
  // the Y values of the final row, but assumes that the last row of U & V
  // applies to a full two rows of Y. YV12A is the same as YV12, but with an
  // additional alpha plane that has the same size and alignment as the Y plane.

  size_t y_stride = RoundUp(row_bytes(VideoFrame::kYPlane),
                            kFrameSizeAlignment);
  size_t uv_stride = RoundUp(row_bytes(VideoFrame::kUPlane),
                             kFrameSizeAlignment);
  // The *2 here is because some formats (e.g. h264) allow interlaced coding,
  // and then the size needs to be a multiple of two macroblocks (vertically).
  // See libavcodec/utils.c:avcodec_align_dimensions2().
  size_t y_height = RoundUp(coded_size_.height(), kFrameSizeAlignment * 2);
  size_t uv_height =
      (format_ == VideoFrame::YV12 || format_ == VideoFrame::YV12A ||
       format_ == VideoFrame::I420)
          ? y_height / 2
          : y_height;
  size_t y_bytes = y_height * y_stride;
  size_t uv_bytes = uv_height * uv_stride;
  size_t a_bytes = format_ == VideoFrame::YV12A ? y_bytes : 0;

  // The extra line of UV being allocated is because h264 chroma MC
  // overreads by one line in some cases, see libavcodec/utils.c:
  // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
  // put_h264_chroma_mc4_ssse3().
  uint8* data = reinterpret_cast<uint8*>(
      base::AlignedAlloc(
          y_bytes + (uv_bytes * 2 + uv_stride) + a_bytes + kFrameSizePadding,
          kFrameAddressAlignment));
  no_longer_needed_cb_ = base::Bind(&ReleaseData, data);
  COMPILE_ASSERT(0 == VideoFrame::kYPlane, y_plane_data_must_be_index_0);
  // Plane layout within the single allocation: Y, then U, then V, then
  // (for YV12A) alpha.
  data_[VideoFrame::kYPlane] = data;
  data_[VideoFrame::kUPlane] = data + y_bytes;
  data_[VideoFrame::kVPlane] = data + y_bytes + uv_bytes;
  strides_[VideoFrame::kYPlane] = y_stride;
  strides_[VideoFrame::kUPlane] = uv_stride;
  strides_[VideoFrame::kVPlane] = uv_stride;
  if (format_ == YV12A) {
    data_[VideoFrame::kAPlane] = data + y_bytes + (2 * uv_bytes);
    strides_[VideoFrame::kAPlane] = y_stride;
  }
}

VideoFrame::VideoFrame(VideoFrame::Format format,
                       const gfx::Size& coded_size,
                       const gfx::Rect& visible_rect,
                       const gfx::Size& natural_size,
                       base::TimeDelta timestamp)
    : format_(format),
      coded_size_(coded_size),
      visible_rect_(visible_rect),
      natural_size_(natural_size),
      texture_target_(0),
      shared_memory_handle_(base::SharedMemory::NULLHandle()),
      timestamp_(timestamp) {
  // Plane pointers/strides are filled in later by the Allocate*/Wrap*
  // factories; start them zeroed.
  memset(&strides_, 0, sizeof(strides_));
  memset(&data_, 0, sizeof(data_));
}

VideoFrame::~VideoFrame() {
  // Runs the release callback exactly once (ResetAndReturn clears it first);
  // for allocated frames this frees the pixel buffer via ReleaseData().
  if (!no_longer_needed_cb_.is_null())
    base::ResetAndReturn(&no_longer_needed_cb_).Run();
}

bool VideoFrame::IsValidPlane(size_t plane) const {
  return (plane < NumPlanes(format_));
}

int VideoFrame::stride(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  return strides_[plane];
}

// Returns the number of bytes of actual pixel data per row for |plane|
// (excluding stride padding).
int VideoFrame::row_bytes(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  int width = coded_size_.width();
  switch (format_) {
    // 32bpp.
    case RGB32:
      return width * 4;

    // Planar, 8bpp.
    case YV12A:
      if (plane == kAPlane)
        return width;
    // Fallthrough.
    case YV12:
    case YV16:
    case I420:
      if (plane == kYPlane)
        return width;
      // Chroma planes are horizontally subsampled by 2.
      return RoundUp(width, 2) / 2;

    default:
      break;
  }

  // Intentionally leave out non-production formats.
  NOTREACHED() << "Unsupported video frame format: " << format_;
  return 0;
}

// Returns the number of rows of actual pixel data for |plane|.
int VideoFrame::rows(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  int height = coded_size_.height();
  switch (format_) {
    case RGB32:
    case YV16:
      return height;

    case YV12A:
      if (plane == kAPlane)
        return height;
    // Fallthrough.
    case YV12:
    case I420:
      if (plane == kYPlane)
        return height;
      // Chroma planes are vertically subsampled by 2.
      return RoundUp(height, 2) / 2;

    default:
      break;
  }

  // Intentionally leave out non-production formats.
  NOTREACHED() << "Unsupported video frame format: " << format_;
  return 0;
}

uint8* VideoFrame::data(size_t plane) const {
  DCHECK(IsValidPlane(plane));
  return data_[plane];
}

const scoped_refptr<VideoFrame::MailboxHolder>& VideoFrame::texture_mailbox()
    const {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  return texture_mailbox_holder_;
}

uint32 VideoFrame::texture_target() const {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  return texture_target_;
}

base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
  return shared_memory_handle_;
}

bool VideoFrame::IsEndOfStream() const {
  // An EMPTY frame (see CreateEmptyFrame()) marks end of stream.
  return format_ == VideoFrame::EMPTY;
}

// Folds every valid plane's pixel rows (row_bytes per row, skipping stride
// padding) into |context| for test comparisons.
void VideoFrame::HashFrameForTesting(base::MD5Context* context) {
  for (int plane = 0; plane < kMaxPlanes; ++plane) {
    if (!IsValidPlane(plane))
      break;
    for (int row = 0; row < rows(plane); ++row) {
      base::MD5Update(context, base::StringPiece(
          reinterpret_cast<char*>(data(plane) + stride(plane) * row),
          row_bytes(plane)));
    }
  }
}

VideoFrame::MailboxHolder::MailboxHolder(
    const gpu::Mailbox& mailbox,
    unsigned sync_point,
    const TextureNoLongerNeededCallback& release_callback)
    : mailbox_(mailbox),
      sync_point_(sync_point),
      release_callback_(release_callback) {}

VideoFrame::MailboxHolder::~MailboxHolder() {
  // Notifies the texture owner (with the last-known sync point) that the
  // mailbox is no longer referenced by this frame.
  if (!release_callback_.is_null())
    release_callback_.Run(sync_point_);
}

}  // namespace media