/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 *
 */

#include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_decoder.h"

#if defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)

#include "libyuv/convert.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/modules/video_coding/codecs/h264/h264_video_toolbox_nalu.h"
#include "webrtc/video_frame.h"

namespace internal {

// Convenience function for creating a dictionary.
inline CFDictionaryRef CreateCFDictionary(CFTypeRef* keys,
                                          CFTypeRef* values,
                                          size_t size) {
  return CFDictionaryCreate(nullptr, keys, values, size,
                            &kCFTypeDictionaryKeyCallBacks,
                            &kCFTypeDictionaryValueCallBacks);
}

// Struct that we pass to the decoder for each frame to decode. We receive it
// back in the decoder callback.
struct FrameDecodeParams {
  FrameDecodeParams(webrtc::DecodedImageCallback* cb, int64_t ts)
      : callback(cb), timestamp(ts) {}
  webrtc::DecodedImageCallback* callback;
  int64_t timestamp;
};

// On decode we receive a CVPixelBuffer, which we need to convert to a frame
// buffer for use in the rest of WebRTC. Unfortunately this involves a frame
// copy.
// TODO(tkchin): Stuff CVPixelBuffer into a TextureBuffer and pass that along
// instead once the pipeline supports it.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> VideoFrameBufferForPixelBuffer(
    CVPixelBufferRef pixel_buffer) {
  RTC_DCHECK(pixel_buffer);
  RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer) ==
             kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
  size_t width = CVPixelBufferGetWidthOfPlane(pixel_buffer, 0);
  size_t height = CVPixelBufferGetHeightOfPlane(pixel_buffer, 0);
  // TODO(tkchin): Use a frame buffer pool.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      new rtc::RefCountedObject<webrtc::I420Buffer>(width, height);
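  // Lock the pixel buffer before touching its planes; the plane base addresses
  // read below are only valid while the buffer is locked.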
  CVPixelBufferLockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
  const uint8_t* src_y = reinterpret_cast<const uint8_t*>(
      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 0));
  int src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 0);
  const uint8_t* src_uv = reinterpret_cast<const uint8_t*>(
      CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1));
  int src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
  int ret = libyuv::NV12ToI420(
      src_y, src_y_stride, src_uv, src_uv_stride,
      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
      width, height);
  CVPixelBufferUnlockBaseAddress(pixel_buffer, kCVPixelBufferLock_ReadOnly);
  if (ret) {
    LOG(LS_ERROR) << "Error converting NV12 to I420: " << ret;
    return nullptr;
  }
  return buffer;
}

// This is the callback function that VideoToolbox calls when decode is
// complete.
void VTDecompressionOutputCallback(void* decoder,
                                   void* params,
                                   OSStatus status,
                                   VTDecodeInfoFlags info_flags,
                                   CVImageBufferRef image_buffer,
                                   CMTime timestamp,
                                   CMTime duration) {
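  // Reclaim ownership of the FrameDecodeParams that Decode() released to
  // VideoToolbox when it submitted the frame.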
  rtc::scoped_ptr<FrameDecodeParams> decode_params(
      reinterpret_cast<FrameDecodeParams*>(params));
  if (status != noErr) {
    LOG(LS_ERROR) << "Failed to decode frame. Status: " << status;
    return;
  }
  // TODO(tkchin): Handle CVO properly.
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      VideoFrameBufferForPixelBuffer(image_buffer);
  if (!buffer) {
    // Conversion failed; the error has already been logged.
    return;
  }
  webrtc::VideoFrame decoded_frame(buffer, decode_params->timestamp, 0,
                                   webrtc::kVideoRotation_0);
  decode_params->callback->Decoded(decoded_frame);
}

}  // namespace internal

namespace webrtc {

H264VideoToolboxDecoder::H264VideoToolboxDecoder()
    : callback_(nullptr),
      video_format_(nullptr),
      decompression_session_(nullptr) {}

H264VideoToolboxDecoder::~H264VideoToolboxDecoder() {
  DestroyDecompressionSession();
  SetVideoFormat(nullptr);
}

int H264VideoToolboxDecoder::InitDecode(const VideoCodec* video_codec,
                                        int number_of_cores) {
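  // Nothing to do here: the decompression session is created lazily in
  // Decode() once the first SPS/PPS (i.e. the video format) is available.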
  return WEBRTC_VIDEO_CODEC_OK;
}

int H264VideoToolboxDecoder::Decode(
    const EncodedImage& input_image,
    bool missing_frames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codec_specific_info,
    int64_t render_time_ms) {
  RTC_DCHECK(input_image._buffer);

  CMSampleBufferRef sample_buffer = nullptr;
  if (!H264AnnexBBufferToCMSampleBuffer(input_image._buffer,
                                        input_image._length, video_format_,
                                        &sample_buffer)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  RTC_DCHECK(sample_buffer);
  // Check if the video format has changed, and reinitialize decoder if needed.
  CMVideoFormatDescriptionRef description =
      CMSampleBufferGetFormatDescription(sample_buffer);
  if (!CMFormatDescriptionEqual(description, video_format_)) {
    SetVideoFormat(description);
    ResetDecompressionSession();
  }
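  // Decode asynchronously; the decoded frame is delivered to
  // internal::VTDecompressionOutputCallback.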
  VTDecodeFrameFlags decode_flags =
      kVTDecodeFrame_EnableAsynchronousDecompression;
  rtc::scoped_ptr<internal::FrameDecodeParams> frame_decode_params;
  frame_decode_params.reset(
      new internal::FrameDecodeParams(callback_, input_image._timeStamp));
  OSStatus status = VTDecompressionSessionDecodeFrame(
      decompression_session_, sample_buffer, decode_flags,
      frame_decode_params.release(), nullptr);
  CFRelease(sample_buffer);
  if (status != noErr) {
    LOG(LS_ERROR) << "Failed to decode frame with code: " << status;
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int H264VideoToolboxDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  RTC_DCHECK(!callback_);
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int H264VideoToolboxDecoder::Release() {
  callback_ = nullptr;
  return WEBRTC_VIDEO_CODEC_OK;
}

int H264VideoToolboxDecoder::Reset() {
  ResetDecompressionSession();
  return WEBRTC_VIDEO_CODEC_OK;
}

int H264VideoToolboxDecoder::ResetDecompressionSession() {
  DestroyDecompressionSession();

  // Need to wait for the first SPS to initialize decoder.
  if (!video_format_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }

  // Set keys for OpenGL and IOSurface compatibility, which makes the decoder
  // create pixel buffers with GPU-backed memory. The intent here is to pass
  // the pixel buffers directly so that we avoid a texture upload later during
  // rendering. This is currently moot because we convert back to an I420 frame
  // after decode, but eventually we will be able to plumb CVPixelBuffers
  // directly to the renderer.
  // TODO(tkchin): Maybe only set the OpenGL/IOSurface keys if we know that we
  // can pass CVPixelBuffers as native handles in decoder output.
  static size_t const attributes_size = 3;
  CFTypeRef keys[attributes_size] = {
#if defined(WEBRTC_IOS)
    kCVPixelBufferOpenGLESCompatibilityKey,
#elif defined(WEBRTC_MAC)
    kCVPixelBufferOpenGLCompatibilityKey,
#endif
    kCVPixelBufferIOSurfacePropertiesKey,
    kCVPixelBufferPixelFormatTypeKey
  };
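  // An empty dictionary for kCVPixelBufferIOSurfacePropertiesKey requests
  // IOSurface-backed pixel buffers with default surface properties.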
  CFDictionaryRef io_surface_value =
      internal::CreateCFDictionary(nullptr, nullptr, 0);
  int64_t nv12type = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
  CFNumberRef pixel_format =
      CFNumberCreate(nullptr, kCFNumberLongType, &nv12type);
  CFTypeRef values[attributes_size] = {kCFBooleanTrue, io_surface_value,
                                       pixel_format};
  CFDictionaryRef attributes =
      internal::CreateCFDictionary(keys, values, attributes_size);
  if (io_surface_value) {
    CFRelease(io_surface_value);
    io_surface_value = nullptr;
  }
  if (pixel_format) {
    CFRelease(pixel_format);
    pixel_format = nullptr;
  }
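  // |this| is passed as the callback's refcon, i.e. the |decoder| argument of
  // internal::VTDecompressionOutputCallback.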
  VTDecompressionOutputCallbackRecord record = {
      internal::VTDecompressionOutputCallback, this,
  };
  OSStatus status =
      VTDecompressionSessionCreate(nullptr, video_format_, nullptr, attributes,
                                   &record, &decompression_session_);
  CFRelease(attributes);
  if (status != noErr) {
    DestroyDecompressionSession();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  ConfigureDecompressionSession();

  return WEBRTC_VIDEO_CODEC_OK;
}

void H264VideoToolboxDecoder::ConfigureDecompressionSession() {
  RTC_DCHECK(decompression_session_);
#if defined(WEBRTC_IOS)
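  // Hint that this is a real-time stream so VideoToolbox favors low-latency
  // decoding.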
  VTSessionSetProperty(decompression_session_,
                       kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
#endif
}

void H264VideoToolboxDecoder::DestroyDecompressionSession() {
  if (decompression_session_) {
    VTDecompressionSessionInvalidate(decompression_session_);
    decompression_session_ = nullptr;
  }
}

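// Retains |video_format| (which may be null) and releases any previously held
// format description.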
void H264VideoToolboxDecoder::SetVideoFormat(
    CMVideoFormatDescriptionRef video_format) {
  if (video_format_ == video_format) {
    return;
  }
  if (video_format_) {
    CFRelease(video_format_);
  }
  video_format_ = video_format;
  if (video_format_) {
    CFRetain(video_format_);
  }
}

const char* H264VideoToolboxDecoder::ImplementationName() const {
  return "VideoToolbox";
}

}  // namespace webrtc

#endif  // defined(WEBRTC_VIDEO_TOOLBOX_SUPPORTED)