1// Copyright (c) 2012 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "content/common/gpu/media/dxva_video_decode_accelerator.h"
6
7#if !defined(OS_WIN)
8#error This file should only be built on Windows.
9#endif   // !defined(OS_WIN)
10
11#include <ks.h>
12#include <codecapi.h>
13#include <mfapi.h>
14#include <mferror.h>
15#include <wmcodecdsp.h>
16
17#include "base/bind.h"
18#include "base/callback.h"
19#include "base/command_line.h"
20#include "base/debug/trace_event.h"
21#include "base/logging.h"
22#include "base/memory/scoped_handle.h"
23#include "base/memory/scoped_ptr.h"
24#include "base/memory/shared_memory.h"
25#include "base/message_loop/message_loop.h"
26#include "media/video/video_decode_accelerator.h"
27#include "ui/gl/gl_bindings.h"
28#include "ui/gl/gl_surface_egl.h"
29#include "ui/gl/gl_switches.h"
30
31namespace content {
32
// We only request 5 picture buffers from the client which are used to hold the
// decoded samples. These buffers are then reused when the client tells us that
// it is done with the buffer.
static const int kNumPictureBuffers = 5;

// Process-wide D3D state shared by all decoder instances. Set up once by
// PreSandboxInitialization / CreateD3DDevManager before the sandbox engages.
bool DXVAVideoDecodeAccelerator::pre_sandbox_init_done_ = false;
uint32 DXVAVideoDecodeAccelerator::dev_manager_reset_token_ = 0;
IDirect3DDeviceManager9* DXVAVideoDecodeAccelerator::device_manager_ = NULL;
IDirect3DDevice9Ex* DXVAVideoDecodeAccelerator::device_ = NULL;
IDirect3DQuery9* DXVAVideoDecodeAccelerator::query_ = NULL;
IDirect3D9Ex* DXVAVideoDecodeAccelerator::d3d9_ = NULL;
44
// Logs |log| and returns |ret| from the current function when |result| is
// false. The do/while(0) wrapper makes the macro a single statement so it is
// safe inside unbraced if/else bodies.
#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

// As RETURN_ON_FAILURE, but |result| is an HRESULT which is appended to the
// log message on failure. Note: no trailing semicolon here — the call site
// supplies it. A semicolon in the definition would expand to an extra empty
// statement, which breaks unbraced if/else constructs.
#define RETURN_ON_HR_FAILURE(result, log, ret)                    \
  RETURN_ON_FAILURE(SUCCEEDED(result),                            \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret)

// Logs |log|, reports |error_code| to the client via StopOnError, and returns
// |ret| when |result| is false.
#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

// HRESULT flavor of RETURN_AND_NOTIFY_ON_FAILURE. As above, no trailing
// semicolon in the definition.
#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)  \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                      \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret)
71
// Maximum number of iterations we allow before aborting the attempt to flush
// the batched queries to the driver and allow torn/corrupt frames to be
// rendered.
// Declared as a typed constant (rather than the old anonymous-enum hack) for
// consistency with kNumPictureBuffers above.
static const int kMaxIterationsForD3DFlush = 10;
76
77static IMFSample* CreateEmptySample() {
78  base::win::ScopedComPtr<IMFSample> sample;
79  HRESULT hr = MFCreateSample(sample.Receive());
80  RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
81  return sample.Detach();
82}
83
84// Creates a Media Foundation sample with one buffer of length |buffer_length|
85// on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
86static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
87  CHECK_GT(buffer_length, 0);
88
89  base::win::ScopedComPtr<IMFSample> sample;
90  sample.Attach(CreateEmptySample());
91
92  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
93  HRESULT hr = E_FAIL;
94  if (align == 0) {
95    // Note that MFCreateMemoryBuffer is same as MFCreateAlignedMemoryBuffer
96    // with the align argument being 0.
97    hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
98  } else {
99    hr = MFCreateAlignedMemoryBuffer(buffer_length,
100                                     align - 1,
101                                     buffer.Receive());
102  }
103  RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);
104
105  hr = sample->AddBuffer(buffer);
106  RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);
107
108  return sample.Detach();
109}
110
111// Creates a Media Foundation sample with one buffer containing a copy of the
112// given Annex B stream data.
113// If duration and sample time are not known, provide 0.
114// |min_size| specifies the minimum size of the buffer (might be required by
115// the decoder for input). If no alignment is required, provide 0.
116static IMFSample* CreateInputSample(const uint8* stream, int size,
117                                    int min_size, int alignment) {
118  CHECK(stream);
119  CHECK_GT(size, 0);
120  base::win::ScopedComPtr<IMFSample> sample;
121  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
122                                            alignment));
123  RETURN_ON_FAILURE(sample, "Failed to create empty sample", NULL);
124
125  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
126  HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
127  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);
128
129  DWORD max_length = 0;
130  DWORD current_length = 0;
131  uint8* destination = NULL;
132  hr = buffer->Lock(&destination, &max_length, &current_length);
133  RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);
134
135  CHECK_EQ(current_length, 0u);
136  CHECK_GE(static_cast<int>(max_length), size);
137  memcpy(destination, stream, size);
138
139  hr = buffer->Unlock();
140  RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);
141
142  hr = buffer->SetCurrentLength(size);
143  RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);
144
145  return sample.Detach();
146}
147
148static IMFSample* CreateSampleFromInputBuffer(
149    const media::BitstreamBuffer& bitstream_buffer,
150    DWORD stream_size,
151    DWORD alignment) {
152  base::SharedMemory shm(bitstream_buffer.handle(), true);
153  RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
154                    "Failed in base::SharedMemory::Map", NULL);
155
156  return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
157                           bitstream_buffer.size(),
158                           stream_size,
159                           alignment);
160}
161
// Maintains information about a DXVA picture buffer, i.e. whether it is
// available for rendering, the texture information, etc.
struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
 public:
  // Creates the EGL pbuffer surface and the shared D3D texture backing the
  // client-provided |buffer|. Returns a NULL linked_ptr on failure.
  static linked_ptr<DXVAPictureBuffer> Create(
      const media::PictureBuffer& buffer, EGLConfig egl_config);
  ~DXVAPictureBuffer();

  // Releases the EGL texture binding and marks the buffer as available.
  void ReusePictureBuffer();
  // Copies the output sample data to the picture buffer provided by the
  // client.
  // The dest_surface parameter contains the decoded bits.
  bool CopyOutputSampleDataToPictureBuffer(IDirect3DSurface9* dest_surface);

  // True when the buffer is free to receive a new decoded picture.
  bool available() const {
    return available_;
  }

  void set_available(bool available) {
    available_ = available;
  }

  // Client-assigned picture buffer id.
  int id() const {
    return picture_buffer_.id();
  }

 private:
  // Use Create() instead; it fills in the EGL/D3D resources.
  explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);

  bool available_;  // Free for reuse?
  media::PictureBuffer picture_buffer_;  // Client-provided buffer info.
  EGLSurface decoding_surface_;  // ANGLE pbuffer surface; owned.
  base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;

  DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};
198
// static
linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
    DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
        const media::PictureBuffer& buffer, EGLConfig egl_config) {
  linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  // Pbuffer matching the client buffer dimensions, bindable later as a
  // GL_TEXTURE_2D RGB texture via eglBindTexImage.
  EGLint attrib_list[] = {
    EGL_WIDTH, buffer.size().width(),
    EGL_HEIGHT, buffer.size().height(),
    EGL_TEXTURE_FORMAT, EGL_TEXTURE_RGB,
    EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
    EGL_NONE
  };

  picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
      egl_display,
      egl_config,
      attrib_list);
  RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
                    "Failed to create surface",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  // Ask ANGLE for the D3D share handle backing the pbuffer so the decoder's
  // own D3D device can render into the same memory.
  HANDLE share_handle = NULL;
  EGLBoolean ret = eglQuerySurfacePointerANGLE(
      egl_display,
      picture_buffer->decoding_surface_,
      EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
      &share_handle);

  RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
                    "Failed to query ANGLE surface pointer",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  // Create a texture on the decoder's device aliasing ANGLE's surface via
  // the share handle; StretchRect into this texture makes decoded frames
  // visible to GL.
  HRESULT hr = DXVAVideoDecodeAccelerator::device_->CreateTexture(
      buffer.size().width(),
      buffer.size().height(),
      1,
      D3DUSAGE_RENDERTARGET,
      D3DFMT_X8R8G8B8,
      D3DPOOL_DEFAULT,
      picture_buffer->decoding_texture_.Receive(),
      &share_handle);

  RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
                       linked_ptr<DXVAPictureBuffer>(NULL));
  return picture_buffer;
}
248
// New buffers start out available; Create() supplies the EGL surface and the
// shared D3D texture afterwards.
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available_(true),
      picture_buffer_(buffer),
      decoding_surface_(NULL) {
}
255
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
  // Unbind and destroy the EGL surface if Create() got far enough to make
  // one. decoding_texture_ is released automatically by ScopedComPtr.
  if (decoding_surface_) {
    EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

    eglReleaseTexImage(
        egl_display,
        decoding_surface_,
        EGL_BACK_BUFFER);

    eglDestroySurface(
        egl_display,
        decoding_surface_);
    decoding_surface_ = NULL;
  }
}
271
272void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
273  DCHECK(decoding_surface_);
274  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
275  eglReleaseTexImage(
276    egl_display,
277    decoding_surface_,
278    EGL_BACK_BUFFER);
279  set_available(true);
280}
281
// Copies the decoded frame in |dest_surface| into the shared texture backing
// this picture buffer and binds the result to the client's GL texture.
bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
    CopyOutputSampleDataToPictureBuffer(IDirect3DSurface9* dest_surface) {
  DCHECK(dest_surface);

  D3DSURFACE_DESC surface_desc;
  HRESULT hr = dest_surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  D3DSURFACE_DESC texture_desc;
  decoding_texture_->GetLevelDesc(0, &texture_desc);
  // TODO(ananta)
  // We need to support mid stream resize.
  if (texture_desc.Width != surface_desc.Width ||
      texture_desc.Height != surface_desc.Height) {
    NOTREACHED() << "Decode surface of different dimension than texture";
    return false;
  }

  // The StretchRect below doubles as the pixel format conversion from the
  // decoder's output format to X8R8G8B8; verify the device supports it.
  hr = d3d9_->CheckDeviceFormatConversion(D3DADAPTER_DEFAULT,
                                          D3DDEVTYPE_HAL,
                                          surface_desc.Format,
                                          D3DFMT_X8R8G8B8);
  bool device_supports_format_conversion = (hr == S_OK);

  RETURN_ON_FAILURE(device_supports_format_conversion,
                    "Device does not support format converision",
                    false);

  // This function currently executes in the context of IPC handlers in the
  // GPU process which ensures that there is always an OpenGL context.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

  base::win::ScopedComPtr<IDirect3DSurface9> d3d_surface;
  hr = decoding_texture_->GetSurfaceLevel(0, d3d_surface.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);

  hr = device_->StretchRect(dest_surface,
                            NULL,
                            d3d_surface,
                            NULL,
                            D3DTEXF_NONE);
  RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",
                        false);

  // Ideally, this should be done immediately before the draw call that uses
  // the texture. Flush it once here though.
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END", false);

  // The DXVA decoder has its own device which it uses for decoding. ANGLE
  // has its own device which we don't have access to.
  // The above code attempts to copy the decoded picture into a surface
  // which is owned by ANGLE. As there are multiple devices involved in
  // this, the StretchRect call above is not synchronous.
  // We attempt to flush the batched operations to ensure that the picture is
  // copied to the surface owned by ANGLE.
  // We need to do this in a loop and call flush multiple times.
  // We have seen the GetData call for flushing the command buffer fail to
  // return success occassionally on multi core machines, leading to an
  // infinite loop.
  // Workaround is to have an upper limit of 10 on the number of iterations to
  // wait for the Flush to finish.
  int iterations = 0;
  while ((query_->GetData(NULL, 0, D3DGETDATA_FLUSH) == S_FALSE) &&
          ++iterations < kMaxIterationsForD3DFlush) {
    Sleep(1);  // Poor-man's Yield().
  }
  // Bind the (now updated) pbuffer contents to the client's texture, then
  // restore the previously bound texture.
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglBindTexImage(
      egl_display,
      decoding_surface_,
      EGL_BACK_BUFFER);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glBindTexture(GL_TEXTURE_2D, current_texture);
  return true;
}
363
// Takes over the caller's reference to |sample| (Attach does not AddRef) and
// remembers which input bitstream buffer produced it.
DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IMFSample* sample)
    : input_buffer_id(buffer_id) {
  output_sample.Attach(sample);
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}
371
372// static
373void DXVAVideoDecodeAccelerator::PreSandboxInitialization() {
374  // Should be called only once during program startup.
375  DCHECK(!pre_sandbox_init_done_);
376
377  static const wchar_t* kDecodingDlls[] = {
378    L"d3d9.dll",
379    L"dxva2.dll",
380    L"mf.dll",
381    L"mfplat.dll",
382    L"msmpeg2vdec.dll",
383  };
384
385  for (int i = 0; i < arraysize(kDecodingDlls); ++i) {
386    if (!::LoadLibrary(kDecodingDlls[i])) {
387      DLOG(ERROR) << "Failed to load decoder dll: " << kDecodingDlls[i]
388                  << ", Error: " << ::GetLastError();
389      return;
390    }
391  }
392
393  RETURN_ON_FAILURE(CreateD3DDevManager(),
394                    "Failed to initialize D3D device and manager",);
395  pre_sandbox_init_done_ = true;
396}
397
// static
// Creates the shared D3D9Ex device, the DXVA device manager handed to the
// decoder MFT, and the event query used to flush driver command batches.
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, &d3d9_);
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  // Minimal 1x1 windowed swap-chain parameters; enough to create a valid
  // device for decoding.
  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = ::GetShellWindow();
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
                             D3DDEVTYPE_HAL,
                             ::GetShellWindow(),
                             D3DCREATE_FPU_PRESERVE |
                             D3DCREATE_SOFTWARE_VERTEXPROCESSING |
                             D3DCREATE_DISABLE_PSGP_THREADING |
                             D3DCREATE_MULTITHREADED,
                             &present_params,
                             NULL,
                             &device_);
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  // The device manager is what InitDecoder hands to the MFT via
  // MFT_MESSAGE_SET_D3D_MANAGER.
  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         &device_manager_);
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  hr = device_manager_->ResetDevice(device_, dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  // Event query used by CopyOutputSampleDataToPictureBuffer to flush the
  // driver's batched commands.
  hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, &query_);
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
  // Ensure query_ API works (to avoid an infinite loop later in
  // CopyOutputSampleDataToPictureBuffer).
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
  return true;
}
442
DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    media::VideoDecodeAccelerator::Client* client,
    const base::Callback<bool(void)>& make_context_current)
    : client_(client),
      egl_config_(NULL),
      state_(kUninitialized),
      pictures_requested_(false),
      inputs_before_decode_(0),
      make_context_current_(make_context_current) {
  // The MFT stream-info structs are plain C structs filled in later by
  // GetStreamsInfoAndBufferReqs; zero them until then.
  memset(&input_stream_info_, 0, sizeof(input_stream_info_));
  memset(&output_stream_info_, 0, sizeof(output_stream_info_));
}
455
DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  // Drop the client pointer; the object is going away.
  client_ = NULL;
}
459
// Brings up Media Foundation and the decoder MFT, and moves the state
// machine from kUninitialized to kNormal. Returns false (after notifying the
// client via StopOnError) on any failure.
bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile) {
  DCHECK(CalledOnValidThread());

  // TODO(ananta)
  // H264PROFILE_HIGH video decoding is janky at times. Needs more
  // investigation.
  if (profile != media::H264PROFILE_BASELINE &&
      profile != media::H264PROFILE_MAIN &&
      profile != media::H264PROFILE_HIGH) {
    RETURN_AND_NOTIFY_ON_FAILURE(false,
        "Unsupported h264 profile", PLATFORM_FAILURE, false);
  }

  RETURN_AND_NOTIFY_ON_FAILURE(pre_sandbox_init_done_,
      "PreSandbox initialization not completed", PLATFORM_FAILURE, false);

  // The ANGLE share-handle extension is required to hand decoded D3D
  // surfaces to GL (see DXVAPictureBuffer::Create).
  RETURN_AND_NOTIFY_ON_FAILURE(
      gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
      "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
      PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kUninitialized),
      "Initialize: invalid state: " << state_, ILLEGAL_STATE, false);

  HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  // Signal the MFT that streaming is starting.
  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
      "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
      PLATFORM_FAILURE, false);

  state_ = kNormal;
  // Notify asynchronously so the client callback runs off this call stack.
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyInitializeDone,
      base::AsWeakPtr(this)));
  return true;
}
511
// Wraps the client's bitstream buffer in an MF sample and feeds it to the
// decoder via DecodeInternal.
void DXVAVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped ||
                                state_ == kFlushing),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  // Copy the bitstream out of shared memory into a sample sized and aligned
  // per the decoder's input stream requirements.
  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
                                            input_stream_info_.cbSize,
                                            input_stream_info_.cbAlignment));
  RETURN_AND_NOTIFY_ON_FAILURE(sample, "Failed to create input sample",
                               PLATFORM_FAILURE,);

  // The sample timestamp carries the bitstream buffer id so the decoded
  // output can be correlated back to this input.
  RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
      "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);

  DecodeInternal(sample);
}
532
// Receives the client's picture buffers (exactly kNumPictureBuffers of them),
// wraps each in a DXVAPictureBuffer with its EGL/D3D resources, and retries
// any output samples that were waiting for buffers.
void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);
  RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
      "Failed to provide requested picture buffers. (Got " << buffers.size() <<
      ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);

  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    linked_ptr<DXVAPictureBuffer> picture_buffer =
        DXVAPictureBuffer::Create(buffers[buffer_index], egl_config_);
    RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
        "Failed to allocate picture buffer", PLATFORM_FAILURE,);

    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(), picture_buffer)).second;
    DCHECK(inserted);
  }
  // Newly available buffers may unblock queued output samples, and possibly
  // a flush that was waiting on them.
  ProcessPendingSamples();
  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}
560
// Client notification that it is done with |picture_buffer_id|; the buffer is
// returned to the available pool and pending work is retried.
void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  RETURN_AND_NOTIFY_ON_FAILURE(it != output_picture_buffers_.end(),
      "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);

  // Unbind the buffer and mark it available, then see whether queued output
  // samples (or a pending flush) can now make progress.
  it->second->ReusePictureBuffer();
  ProcessPendingSamples();

  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}
578
// Drains the decoder of all accepted input. Completion may be deferred until
// pending output samples have been delivered (see AssignPictureBuffers /
// ReusePictureBuffer, which call FlushInternal when the queue empties).
void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Unexpected decoder state: " << state_, ILLEGAL_STATE,);

  state_ = kFlushing;

  // Ask the MFT to produce output for all the input it has accepted so far.
  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
      "Failed to send drain message", PLATFORM_FAILURE,);

  // Output still pending delivery; the flush completes later.
  if (!pending_output_samples_.empty())
    return;

  FlushInternal();
}
597
// Discards all queued input/output and notifies the client asynchronously;
// the decoder returns to kNormal and is immediately usable again.
void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Reset: invalid state: " << state_, ILLEGAL_STATE,);

  state_ = kResetting;

  // Drop undelivered output and tell the client its queued inputs are gone.
  pending_output_samples_.clear();

  NotifyInputBuffersDropped();

  // MFT_MESSAGE_COMMAND_FLUSH discards data buffered inside the MFT itself.
  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this)));

  state_ = DXVAVideoDecodeAccelerator::kNormal;
}
620
void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(CalledOnValidThread());
  // Release decoder resources, then self-delete; callers must not touch this
  // object after Destroy() returns.
  Invalidate();
  delete this;
}
626
627bool DXVAVideoDecodeAccelerator::InitDecoder() {
628  // We cannot use CoCreateInstance to instantiate the decoder object as that
629  // fails in the sandbox. We mimic the steps CoCreateInstance uses to
630  // instantiate the object.
631  HMODULE decoder_dll = ::GetModuleHandle(L"msmpeg2vdec.dll");
632  RETURN_ON_FAILURE(decoder_dll,
633                    "msmpeg2vdec.dll required for decoding is not loaded",
634                    false);
635
636  typedef HRESULT (WINAPI* GetClassObject)(const CLSID& clsid,
637                                           const IID& iid,
638                                           void** object);
639
640  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
641      GetProcAddress(decoder_dll, "DllGetClassObject"));
642  RETURN_ON_FAILURE(get_class_object,
643                    "Failed to get DllGetClassObject pointer", false);
644
645  base::win::ScopedComPtr<IClassFactory> factory;
646  HRESULT hr = get_class_object(__uuidof(CMSH264DecoderMFT),
647                                __uuidof(IClassFactory),
648                                reinterpret_cast<void**>(factory.Receive()));
649  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);
650
651  hr = factory->CreateInstance(NULL, __uuidof(IMFTransform),
652                               reinterpret_cast<void**>(decoder_.Receive()));
653  RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);
654
655  RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
656                    "Failed to check decoder DXVA support", false);
657
658  hr = decoder_->ProcessMessage(
659            MFT_MESSAGE_SET_D3D_MANAGER,
660            reinterpret_cast<ULONG_PTR>(device_manager_));
661  RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);
662
663  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
664
665  EGLint config_attribs[] = {
666    EGL_BUFFER_SIZE, 32,
667    EGL_RED_SIZE, 8,
668    EGL_GREEN_SIZE, 8,
669    EGL_BLUE_SIZE, 8,
670    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
671    EGL_ALPHA_SIZE, 0,
672    EGL_NONE
673  };
674
675  EGLint num_configs;
676
677  if (!eglChooseConfig(
678      egl_display,
679      config_attribs,
680      &egl_config_,
681      1,
682      &num_configs))
683    return false;
684
685  return SetDecoderMediaTypes();
686}
687
688bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
689  base::win::ScopedComPtr<IMFAttributes> attributes;
690  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
691  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);
692
693  UINT32 dxva = 0;
694  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
695  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);
696
697  hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
698  RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
699  return true;
700}
701
702bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
703  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
704                    "Failed to set decoder input media type", false);
705  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
706}
707
// Describes the input as an H.264 video elementary stream and hands that
// media type to the decoder MFT.
bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
  base::win::ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr = MFCreateMediaType(media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);

  // Not sure about this. msdn recommends setting this value on the input
  // media type.
  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_MixedInterlaceOrProgressive);
  RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);

  hr = decoder_->SetInputType(0, media_type, 0);  // No flags
  RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
  return true;
}
729
// Enumerates the output types the decoder offers and selects the one whose
// subtype matches |subtype|. Returns false if no offered type matches.
bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type, 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    // Release before the next GetOutputAvailableType call refills it.
    out_media_type.Release();
  }
  return false;
}
751
752bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
753                                                int32 param) {
754  HRESULT hr = decoder_->ProcessMessage(msg, param);
755  return SUCCEEDED(hr);
756}
757
// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffer for input nor output, so we have to do it ourselves and make
// sure they're the correct size. We only provide decoding if DXVA is enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  // There should be three flags, one for requiring a whole frame be in a
  // single sample, one for requiring there be one buffer only in a single
  // sample, and one that specifies a fixed sample size. (as in cbSize)
  CHECK_EQ(input_stream_info_.dwFlags, 0x7u);

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The flags here should be the same and mean the same thing, except when
  // DXVA is enabled, there is an extra 0x100 flag meaning decoder will
  // allocate its own sample.
  DVLOG(1) << "Flags: "
          << std::hex << std::showbase << output_stream_info_.dwFlags;
  CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}
790
// Pulls one decoded sample out of the MFT (if any) and forwards it to
// ProcessOutputSample. Handles stream-change and need-more-input results by
// renegotiating the output type or transitioning to kStopped respectively.
void DXVAVideoDecodeAccelerator::DoDecode() {
  // This function is also called from FlushInternal in a loop which could
  // result in the state transitioning to kStopped due to no decoded output.
  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kFlushing ||
                                state_ == kStopped),
      "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  // ProcessOutput may hand back an event collection; we don't consume the
  // events, but the reference must be released to avoid a leak.
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    VLOG(1) << "Got events from ProcessOuput, but discarding";
    events->Release();
  }
  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        state_ = kStopped;
      } else {
        DVLOG(1) << "Received output format change from the decoder."
                    " Recursively invoking DoDecode";
        DoDecode();
      }
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      state_ = kStopped;
      return;
    } else {
      NOTREACHED() << "Unhandled error in DoDecode()";
      return;
    }
  }
  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);
}
844
// Queues a decoded |sample| for delivery to the client. If picture buffers
// are already available the sample is processed immediately; otherwise, on
// the first output, the surface dimensions are read and a picture-buffer
// request is posted to the client. Returns false on failure.
bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  // The media buffer wraps a D3D surface; MR_BUFFER_SERVICE exposes it.
  base::win::ScopedComPtr<IDirect3DSurface9> surface;
  hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                    IID_PPV_ARGS(surface.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                       false);

  // The sample time was set to the bitstream buffer id on input, so it can
  // be used here to correlate output pictures with input buffers.
  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  pending_output_samples_.push_back(
      PendingSampleInfo(input_buffer_id, sample));

  // If we have available picture buffers to copy the output data then use the
  // first one and then flag it as not being available for use.
  if (output_picture_buffers_.size()) {
    ProcessPendingSamples();
    return true;
  }
  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    return true;
  }

  // We only read the surface description, which contains its width/height when
  // we need the picture buffers from the client. Once we have those, then they
  // are reused. This won't work if the frame sizes change mid stream.
  // There is a TODO comment in the
  // DXVAVideoDecodeAccelerator::RequestPictureBuffers function which talks
  // about supporting this.
  D3DSURFACE_DESC surface_desc;
  hr = surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  // Go ahead and request picture buffers.
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::RequestPictureBuffers,
      base::AsWeakPtr(this), surface_desc.Width, surface_desc.Height));

  pictures_requested_ = true;
  return true;
}
895
// Copies queued decoded samples into available client picture buffers and
// posts PictureReady notifications. Once all pending output samples have
// been delivered, any deferred input buffers are scheduled for decoding.
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  // The copy below goes through GL, so the GL context must be current.
  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  OutputBuffers::iterator index;

  // Pair each available picture buffer with the oldest pending sample until
  // one of the two runs out.
  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       !pending_output_samples_.empty();
       ++index) {
    if (index->second->available()) {
      PendingSampleInfo sample_info = pending_output_samples_.front();

      base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
      HRESULT hr = sample_info.output_sample->GetBufferByIndex(
          0, output_buffer.Receive());
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,);

      base::win::ScopedComPtr<IDirect3DSurface9> surface;
      hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                        IID_PPV_ARGS(surface.Receive()));
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get D3D surface from output sample",
          PLATFORM_FAILURE,);

      RETURN_AND_NOTIFY_ON_FAILURE(
          index->second->CopyOutputSampleDataToPictureBuffer(
              surface),
          "Failed to copy output sample", PLATFORM_FAILURE,);

      media::Picture output_picture(index->second->id(),
                                    sample_info.input_buffer_id);
      base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
          &DXVAVideoDecodeAccelerator::NotifyPictureReady,
          base::AsWeakPtr(this), output_picture));

      // Mark the buffer in use until the client returns it via reuse.
      index->second->set_available(false);
      pending_output_samples_.pop_front();
    }
  }

  // With the output queue drained, resume decoding any input buffers that
  // were deferred while output was pending.
  if (!pending_input_buffers_.empty() && pending_output_samples_.empty()) {
    base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
        base::AsWeakPtr(this)));
  }
}
944
945void DXVAVideoDecodeAccelerator::StopOnError(
946  media::VideoDecodeAccelerator::Error error) {
947  DCHECK(CalledOnValidThread());
948
949  if (client_)
950    client_->NotifyError(error);
951  client_ = NULL;
952
953  if (state_ != kUninitialized) {
954    Invalidate();
955  }
956}
957
958void DXVAVideoDecodeAccelerator::Invalidate() {
959  if (state_ == kUninitialized)
960    return;
961  output_picture_buffers_.clear();
962  pending_output_samples_.clear();
963  pending_input_buffers_.clear();
964  decoder_.Release();
965  MFShutdown();
966  state_ = kUninitialized;
967}
968
969void DXVAVideoDecodeAccelerator::NotifyInitializeDone() {
970  if (client_)
971    client_->NotifyInitializeDone();
972}
973
974void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
975  if (client_)
976    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
977}
978
979void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
980  if (client_)
981    client_->NotifyFlushDone();
982}
983
984void DXVAVideoDecodeAccelerator::NotifyResetDone() {
985  if (client_)
986    client_->NotifyResetDone();
987}
988
989void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
990  // This task could execute after the decoder has been torn down.
991  // TODO(ananta)
992  // We need to support mid stream resize.
993  if (state_ != kUninitialized && client_) {
994    client_->ProvidePictureBuffers(
995        kNumPictureBuffers,
996        gfx::Size(width, height),
997        GL_TEXTURE_2D);
998  }
999}
1000
1001void DXVAVideoDecodeAccelerator::NotifyPictureReady(
1002    const media::Picture& picture) {
1003  // This task could execute after the decoder has been torn down.
1004  if (state_ != kUninitialized && client_)
1005    client_->PictureReady(picture);
1006}
1007
1008void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
1009  if (!client_ || !pending_output_samples_.empty())
1010    return;
1011
1012  for (PendingInputs::iterator it = pending_input_buffers_.begin();
1013       it != pending_input_buffers_.end(); ++it) {
1014    LONGLONG input_buffer_id = 0;
1015    RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
1016                         "Failed to get buffer id associated with sample",);
1017    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
1018  }
1019  pending_input_buffers_.clear();
1020}
1021
1022void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
1023  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
1024      "Invalid state: " << state_, ILLEGAL_STATE,);
1025
1026  if (pending_input_buffers_.empty() || !pending_output_samples_.empty())
1027    return;
1028
1029  PendingInputs pending_input_buffers_copy;
1030  std::swap(pending_input_buffers_, pending_input_buffers_copy);
1031
1032  for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
1033       it != pending_input_buffers_copy.end(); ++it) {
1034    DecodeInternal(*it);
1035  }
1036}
1037
1038void DXVAVideoDecodeAccelerator::FlushInternal() {
1039  // The DoDecode function sets the state to kStopped when the decoder returns
1040  // MF_E_TRANSFORM_NEED_MORE_INPUT.
1041  // The MFT decoder can buffer upto 30 frames worth of input before returning
1042  // an output frame. This loop here attempts to retrieve as many output frames
1043  // as possible from the buffered set.
1044  while (state_ != kStopped) {
1045    DoDecode();
1046    if (!pending_output_samples_.empty())
1047      return;
1048  }
1049
1050  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
1051      &DXVAVideoDecodeAccelerator::NotifyFlushDone, base::AsWeakPtr(this)));
1052
1053  state_ = kNormal;
1054}
1055
// Submits |sample| to the MFT decoder. If the decoder is saturated
// (MF_E_NOTACCEPTING), output is drained once and the input retried; if it is
// still saturated the sample is queued for later. On success the client is
// told the input buffer was consumed, since the decoder may buffer many
// frames before emitting output.
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(CalledOnValidThread());

  if (state_ == kUninitialized)
    return;

  // Preserve ordering: if anything is already queued (input or output), this
  // sample must wait its turn behind it.
  if (!pending_output_samples_.empty() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  // Start the ETW trace span on the first input since the last decode.
  if (!inputs_before_decode_) {
    TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample, 0);
  // As per msdn if the decoder returns MF_E_NOTACCEPTING then it means that it
  // has enough data to produce one or more output samples. In this case the
  // recommended options are to
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data
  // We implement the first option, i.e to retrieve the output sample and then
  // process the input again. Failure in either of these steps is treated as a
  // decoder failure.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
        "Failed to process output. Unexpected decoder state: " << state_,
        PLATFORM_FAILURE,);
    hr = decoder_->ProcessInput(0, sample, 0);
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:-
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support 1 pending output sample at any
    // given time due to the limitation with the Microsoft media foundation
    // decoder where it recycles the output Decoder surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      if (pending_output_samples_.empty()) {
        base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
            &DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
            base::AsWeakPtr(this)));
      }
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
      PLATFORM_FAILURE,);

  DoDecode();

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
      "Failed to process output. Unexpected decoder state: " << state_,
      ILLEGAL_STATE,);

  // The sample time carries the bitstream buffer id, set when the sample was
  // created from the input buffer.
  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",);
  // The Microsoft Media foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // TODO(ananta)
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
      base::AsWeakPtr(this), input_buffer_id));
}
1134
1135}  // namespace content
1136