// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/dxva_video_decode_accelerator.h"

#if !defined(OS_WIN)
#error This file should only be built on Windows.
#endif   // !defined(OS_WIN)

#include <ks.h>
#include <codecapi.h>
#include <mfapi.h>
#include <mferror.h>
#include <wmcodecdsp.h>

#include "base/bind.h"
#include "base/callback.h"
#include "base/command_line.h"
#include "base/debug/trace_event.h"
#include "base/file_version_info.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/memory/shared_memory.h"
#include "base/message_loop/message_loop.h"
#include "base/win/windows_version.h"
#include "media/video/video_decode_accelerator.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_surface_egl.h"
#include "ui/gl/gl_switches.h"

namespace content {

// We only request 5 picture buffers from the client, which are used to hold
// the decoded samples. These buffers are then reused when the client tells us
// that it is done with the buffer.
static const int kNumPictureBuffers = 5;

#define RETURN_ON_FAILURE(result, log, ret)  \
  do {                                       \
    if (!(result)) {                         \
      DLOG(ERROR) << log;                    \
      return ret;                            \
    }                                        \
  } while (0)

#define RETURN_ON_HR_FAILURE(result, log, ret)                    \
  RETURN_ON_FAILURE(SUCCEEDED(result),                            \
                    log << ", HRESULT: 0x" << std::hex << result, \
                    ret);

#define RETURN_AND_NOTIFY_ON_FAILURE(result, log, error_code, ret)  \
  do {                                                              \
    if (!(result)) {                                                \
      DVLOG(1) << log;                                              \
      StopOnError(error_code);                                      \
      return ret;                                                   \
    }                                                               \
  } while (0)

#define RETURN_AND_NOTIFY_ON_HR_FAILURE(result, log, error_code, ret)  \
  RETURN_AND_NOTIFY_ON_FAILURE(SUCCEEDED(result),                      \
                               log << ", HRESULT: 0x" << std::hex << result, \
                               error_code, ret);

// Maximum number of iterations we allow before aborting the attempt to flush
// the batched queries to the driver, at which point we give up and allow
// torn/corrupt frames to be rendered.
enum { kMaxIterationsForD3DFlush = 10 };

static IMFSample* CreateEmptySample() {
  base::win::ScopedComPtr<IMFSample> sample;
  HRESULT hr = MFCreateSample(sample.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateSample failed", NULL);
  return sample.Detach();
}

// Creates a Media Foundation sample with one buffer of length |buffer_length|
// on a |align|-byte boundary. Alignment must be a perfect power of 2 or 0.
static IMFSample* CreateEmptySampleWithBuffer(int buffer_length, int align) {
  CHECK_GT(buffer_length, 0);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySample());

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = E_FAIL;
  if (align == 0) {
    // Note that MFCreateMemoryBuffer is the same as MFCreateAlignedMemoryBuffer
    // with the align argument set to 0.
    hr = MFCreateMemoryBuffer(buffer_length, buffer.Receive());
  } else {
    hr = MFCreateAlignedMemoryBuffer(buffer_length,
                                     align - 1,
                                     buffer.Receive());
  }
  RETURN_ON_HR_FAILURE(hr, "Failed to create memory buffer for sample", NULL);

  hr = sample->AddBuffer(buffer);
  RETURN_ON_HR_FAILURE(hr, "Failed to add buffer to sample", NULL);

  return sample.Detach();
}

// Creates a Media Foundation sample with one buffer containing a copy of the
// given Annex B stream data.
// |min_size| specifies the minimum size of the buffer (might be required by
// the decoder for input). If no alignment is required, provide 0 for
// |alignment|.
static IMFSample* CreateInputSample(const uint8* stream, int size,
                                    int min_size, int alignment) {
  CHECK(stream);
  CHECK_GT(size, 0);
  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateEmptySampleWithBuffer(std::max(min_size, size),
                                            alignment));
  RETURN_ON_FAILURE(sample, "Failed to create empty sample", NULL);

  base::win::ScopedComPtr<IMFMediaBuffer> buffer;
  HRESULT hr = sample->GetBufferByIndex(0, buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from sample", NULL);

  DWORD max_length = 0;
  DWORD current_length = 0;
  uint8* destination = NULL;
  hr = buffer->Lock(&destination, &max_length, &current_length);
  RETURN_ON_HR_FAILURE(hr, "Failed to lock buffer", NULL);

  CHECK_EQ(current_length, 0u);
  CHECK_GE(static_cast<int>(max_length), size);
  memcpy(destination, stream, size);

  hr = buffer->Unlock();
  RETURN_ON_HR_FAILURE(hr, "Failed to unlock buffer", NULL);

  hr = buffer->SetCurrentLength(size);
  RETURN_ON_HR_FAILURE(hr, "Failed to set buffer length", NULL);

  return sample.Detach();
}

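// Maps the shared memory backing |bitstream_buffer| and returns a Media
// Foundation sample containing a copy of its contents, using |stream_size|
// as the minimum buffer size and |alignment| for the buffer alignment.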
static IMFSample* CreateSampleFromInputBuffer(
    const media::BitstreamBuffer& bitstream_buffer,
    DWORD stream_size,
    DWORD alignment) {
  base::SharedMemory shm(bitstream_buffer.handle(), true);
  RETURN_ON_FAILURE(shm.Map(bitstream_buffer.size()),
                    "Failed in base::SharedMemory::Map", NULL);

  return CreateInputSample(reinterpret_cast<const uint8*>(shm.memory()),
                           bitstream_buffer.size(),
                           stream_size,
                           alignment);
}

// Maintains information about a DXVA picture buffer, i.e. whether it is
// available for rendering, the texture information, etc.
struct DXVAVideoDecodeAccelerator::DXVAPictureBuffer {
 public:
  static linked_ptr<DXVAPictureBuffer> Create(
      const DXVAVideoDecodeAccelerator& decoder,
      const media::PictureBuffer& buffer,
      EGLConfig egl_config);
  ~DXVAPictureBuffer();

  void ReusePictureBuffer();
  // Copies the output sample data to the picture buffer provided by the
  // client.
  // The dest_surface parameter contains the decoded bits.
  bool CopyOutputSampleDataToPictureBuffer(
      const DXVAVideoDecodeAccelerator& decoder,
      IDirect3DSurface9* dest_surface);

  bool available() const {
    return available_;
  }

  void set_available(bool available) {
    available_ = available;
  }

  int id() const {
    return picture_buffer_.id();
  }

  gfx::Size size() const {
    return picture_buffer_.size();
  }

 private:
  explicit DXVAPictureBuffer(const media::PictureBuffer& buffer);

  bool available_;
  media::PictureBuffer picture_buffer_;
  EGLSurface decoding_surface_;
  base::win::ScopedComPtr<IDirect3DTexture9> decoding_texture_;
  // Set to true if RGB is supported by the texture.
  // Defaults to true.
  bool use_rgb_;

  DISALLOW_COPY_AND_ASSIGN(DXVAPictureBuffer);
};

// static
linked_ptr<DXVAVideoDecodeAccelerator::DXVAPictureBuffer>
DXVAVideoDecodeAccelerator::DXVAPictureBuffer::Create(
    const DXVAVideoDecodeAccelerator& decoder,
    const media::PictureBuffer& buffer,
    EGLConfig egl_config) {
  linked_ptr<DXVAPictureBuffer> picture_buffer(new DXVAPictureBuffer(buffer));

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  EGLint use_rgb = 1;
  eglGetConfigAttrib(egl_display, egl_config, EGL_BIND_TO_TEXTURE_RGB,
                     &use_rgb);

  EGLint attrib_list[] = {
    EGL_WIDTH, buffer.size().width(),
    EGL_HEIGHT, buffer.size().height(),
    EGL_TEXTURE_FORMAT, use_rgb ? EGL_TEXTURE_RGB : EGL_TEXTURE_RGBA,
    EGL_TEXTURE_TARGET, EGL_TEXTURE_2D,
    EGL_NONE
  };

  picture_buffer->decoding_surface_ = eglCreatePbufferSurface(
      egl_display,
      egl_config,
      attrib_list);
  RETURN_ON_FAILURE(picture_buffer->decoding_surface_,
                    "Failed to create surface",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HANDLE share_handle = NULL;
  EGLBoolean ret = eglQuerySurfacePointerANGLE(
      egl_display,
      picture_buffer->decoding_surface_,
      EGL_D3D_TEXTURE_2D_SHARE_HANDLE_ANGLE,
      &share_handle);

  RETURN_ON_FAILURE(share_handle && ret == EGL_TRUE,
                    "Failed to query ANGLE surface pointer",
                    linked_ptr<DXVAPictureBuffer>(NULL));

  HRESULT hr = decoder.device_->CreateTexture(
      buffer.size().width(),
      buffer.size().height(),
      1,
      D3DUSAGE_RENDERTARGET,
      use_rgb ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8,
      D3DPOOL_DEFAULT,
      picture_buffer->decoding_texture_.Receive(),
      &share_handle);

  RETURN_ON_HR_FAILURE(hr, "Failed to create texture",
                       linked_ptr<DXVAPictureBuffer>(NULL));
  picture_buffer->use_rgb_ = !!use_rgb;
  return picture_buffer;
}

DXVAVideoDecodeAccelerator::DXVAPictureBuffer::DXVAPictureBuffer(
    const media::PictureBuffer& buffer)
    : available_(true),
      picture_buffer_(buffer),
      decoding_surface_(NULL),
      use_rgb_(true) {
}

DXVAVideoDecodeAccelerator::DXVAPictureBuffer::~DXVAPictureBuffer() {
  if (decoding_surface_) {
    EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

    eglReleaseTexImage(
        egl_display,
        decoding_surface_,
        EGL_BACK_BUFFER);

    eglDestroySurface(
        egl_display,
        decoding_surface_);
    decoding_surface_ = NULL;
  }
}

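// Releases the EGL texture binding on the pbuffer surface and marks the
// picture buffer as available for reuse.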
void DXVAVideoDecodeAccelerator::DXVAPictureBuffer::ReusePictureBuffer() {
  DCHECK(decoding_surface_);
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglReleaseTexImage(
    egl_display,
    decoding_surface_,
    EGL_BACK_BUFFER);
  set_available(true);
}

bool DXVAVideoDecodeAccelerator::DXVAPictureBuffer::
    CopyOutputSampleDataToPictureBuffer(
        const DXVAVideoDecodeAccelerator& decoder,
        IDirect3DSurface9* dest_surface) {
  DCHECK(dest_surface);

  D3DSURFACE_DESC surface_desc;
  HRESULT hr = dest_surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  D3DSURFACE_DESC texture_desc;
  decoding_texture_->GetLevelDesc(0, &texture_desc);

  if (texture_desc.Width != surface_desc.Width ||
      texture_desc.Height != surface_desc.Height) {
    NOTREACHED() << "Decode surface of different dimension than texture";
    return false;
  }

  hr = decoder.d3d9_->CheckDeviceFormatConversion(
      D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, surface_desc.Format,
      use_rgb_ ? D3DFMT_X8R8G8B8 : D3DFMT_A8R8G8B8);
  RETURN_ON_HR_FAILURE(hr, "Device does not support format conversion", false);

  // This function currently executes in the context of IPC handlers in the
  // GPU process, which ensures that there is always an OpenGL context.
  GLint current_texture = 0;
  glGetIntegerv(GL_TEXTURE_BINDING_2D, &current_texture);

  glBindTexture(GL_TEXTURE_2D, picture_buffer_.texture_id());

  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

  base::win::ScopedComPtr<IDirect3DSurface9> d3d_surface;
  hr = decoding_texture_->GetSurfaceLevel(0, d3d_surface.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface from texture", false);

  hr = decoder.device_->StretchRect(
      dest_surface, NULL, d3d_surface, NULL, D3DTEXF_NONE);
  RETURN_ON_HR_FAILURE(hr, "Colorspace conversion via StretchRect failed",
                        false);

  // Ideally, this should be done immediately before the draw call that uses
  // the texture. Flush it once here though.
  hr = decoder.query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END", false);

  // The DXVA decoder has its own device which it uses for decoding. ANGLE
  // has its own device which we don't have access to.
  // The above code attempts to copy the decoded picture into a surface
  // which is owned by ANGLE. As there are multiple devices involved in
  // this, the StretchRect call above is not synchronous.
  // We attempt to flush the batched operations to ensure that the picture is
  // copied to the surface owned by ANGLE.
  // We need to do this in a loop and call flush multiple times.
  // We have seen the GetData call for flushing the command buffer fail to
  // return success occasionally on multi core machines, leading to an
  // infinite loop.
  // The workaround is to cap the number of iterations we wait for the flush
  // to finish at kMaxIterationsForD3DFlush.
  int iterations = 0;
  while ((decoder.query_->GetData(NULL, 0, D3DGETDATA_FLUSH) == S_FALSE) &&
         ++iterations < kMaxIterationsForD3DFlush) {
    Sleep(1);  // Poor-man's Yield().
  }
  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();
  eglBindTexImage(
      egl_display,
      decoding_surface_,
      EGL_BACK_BUFFER);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glBindTexture(GL_TEXTURE_2D, current_texture);
  return true;
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::PendingSampleInfo(
    int32 buffer_id, IMFSample* sample)
    : input_buffer_id(buffer_id) {
  output_sample.Attach(sample);
}

DXVAVideoDecodeAccelerator::PendingSampleInfo::~PendingSampleInfo() {}

// Creates the D3D9Ex device, the DXVA device manager, and an event query that
// is later used to flush pending GPU commands.
bool DXVAVideoDecodeAccelerator::CreateD3DDevManager() {
  TRACE_EVENT0("gpu", "DXVAVideoDecodeAccelerator_CreateD3DDevManager");

  HRESULT hr = Direct3DCreate9Ex(D3D_SDK_VERSION, d3d9_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Direct3DCreate9Ex failed", false);

  D3DPRESENT_PARAMETERS present_params = {0};
  present_params.BackBufferWidth = 1;
  present_params.BackBufferHeight = 1;
  present_params.BackBufferFormat = D3DFMT_UNKNOWN;
  present_params.BackBufferCount = 1;
  present_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
  present_params.hDeviceWindow = ::GetShellWindow();
  present_params.Windowed = TRUE;
  present_params.Flags = D3DPRESENTFLAG_VIDEO;
  present_params.FullScreen_RefreshRateInHz = 0;
  present_params.PresentationInterval = 0;

  hr = d3d9_->CreateDeviceEx(D3DADAPTER_DEFAULT,
                             D3DDEVTYPE_HAL,
                             ::GetShellWindow(),
                             D3DCREATE_FPU_PRESERVE |
                             D3DCREATE_SOFTWARE_VERTEXPROCESSING |
                             D3DCREATE_DISABLE_PSGP_THREADING |
                             D3DCREATE_MULTITHREADED,
                             &present_params,
                             NULL,
                             device_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device", false);

  hr = DXVA2CreateDirect3DDeviceManager9(&dev_manager_reset_token_,
                                         device_manager_.Receive());
  RETURN_ON_HR_FAILURE(hr, "DXVA2CreateDirect3DDeviceManager9 failed", false);

  hr = device_manager_->ResetDevice(device_, dev_manager_reset_token_);
  RETURN_ON_HR_FAILURE(hr, "Failed to reset device", false);

  hr = device_->CreateQuery(D3DQUERYTYPE_EVENT, query_.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to create D3D device query", false);
  // Ensure query_ API works (to avoid an infinite loop later in
  // CopyOutputSampleDataToPictureBuffer).
  hr = query_->Issue(D3DISSUE_END);
  RETURN_ON_HR_FAILURE(hr, "Failed to issue END test query", false);
  return true;
}

DXVAVideoDecodeAccelerator::DXVAVideoDecodeAccelerator(
    const base::Callback<bool(void)>& make_context_current)
    : client_(NULL),
      dev_manager_reset_token_(0),
      egl_config_(NULL),
      state_(kUninitialized),
      pictures_requested_(false),
      inputs_before_decode_(0),
      make_context_current_(make_context_current),
      weak_this_factory_(this) {
  memset(&input_stream_info_, 0, sizeof(input_stream_info_));
  memset(&output_stream_info_, 0, sizeof(output_stream_info_));
}

DXVAVideoDecodeAccelerator::~DXVAVideoDecodeAccelerator() {
  client_ = NULL;
}

bool DXVAVideoDecodeAccelerator::Initialize(media::VideoCodecProfile profile,
                                            Client* client) {
  DCHECK(CalledOnValidThread());

  client_ = client;

  // Not all versions of Windows 7 and later include Media Foundation DLLs.
  // Instead of crashing while delay loading the DLL when calling MFStartup()
  // below, probe whether we can successfully load the DLL now.
  //
  // See http://crbug.com/339678 for details.
  HMODULE mfplat_dll = ::LoadLibrary(L"MFPlat.dll");
  RETURN_ON_FAILURE(mfplat_dll, "MFPlat.dll is required for decoding", false);

  // TODO(ananta)
  // H264PROFILE_HIGH video decoding is janky at times. Needs more
  // investigation. http://crbug.com/426707
  if (profile != media::H264PROFILE_BASELINE &&
      profile != media::H264PROFILE_MAIN) {
    RETURN_AND_NOTIFY_ON_FAILURE(false,
        "Unsupported h264 profile", PLATFORM_FAILURE, false);
  }

  RETURN_AND_NOTIFY_ON_FAILURE(
      gfx::g_driver_egl.ext.b_EGL_ANGLE_surface_d3d_texture_2d_share_handle,
      "EGL_ANGLE_surface_d3d_texture_2d_share_handle unavailable",
      PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kUninitialized),
      "Initialize: invalid state: " << state_, ILLEGAL_STATE, false);

  HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "MFStartup failed.", PLATFORM_FAILURE,
      false);

  RETURN_AND_NOTIFY_ON_FAILURE(CreateD3DDevManager(),
                               "Failed to initialize D3D device and manager",
                               PLATFORM_FAILURE,
                               false);

  RETURN_AND_NOTIFY_ON_FAILURE(InitDecoder(profile),
      "Failed to initialize decoder", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(GetStreamsInfoAndBufferReqs(),
      "Failed to get input/output stream info.", PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0),
      "Send MFT_MESSAGE_NOTIFY_BEGIN_STREAMING notification failed",
      PLATFORM_FAILURE, false);

  RETURN_AND_NOTIFY_ON_FAILURE(
      SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0),
      "Send MFT_MESSAGE_NOTIFY_START_OF_STREAM notification failed",
      PLATFORM_FAILURE, false);

  state_ = kNormal;
  return true;
}

void DXVAVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped ||
                                state_ == kFlushing),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  base::win::ScopedComPtr<IMFSample> sample;
  sample.Attach(CreateSampleFromInputBuffer(bitstream_buffer,
                                            input_stream_info_.cbSize,
                                            input_stream_info_.cbAlignment));
  RETURN_AND_NOTIFY_ON_FAILURE(sample, "Failed to create input sample",
                               PLATFORM_FAILURE,);

  RETURN_AND_NOTIFY_ON_HR_FAILURE(sample->SetSampleTime(bitstream_buffer.id()),
      "Failed to associate input buffer id with sample", PLATFORM_FAILURE,);

  DecodeInternal(sample);
}

void DXVAVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);
  RETURN_AND_NOTIFY_ON_FAILURE((kNumPictureBuffers == buffers.size()),
      "Failed to provide requested picture buffers. (Got " << buffers.size() <<
      ", requested " << kNumPictureBuffers << ")", INVALID_ARGUMENT,);

  // Copy the picture buffers provided by the client to the available list,
  // and mark these buffers as available for use.
  for (size_t buffer_index = 0; buffer_index < buffers.size();
       ++buffer_index) {
    linked_ptr<DXVAPictureBuffer> picture_buffer =
        DXVAPictureBuffer::Create(*this, buffers[buffer_index], egl_config_);
    RETURN_AND_NOTIFY_ON_FAILURE(picture_buffer.get(),
        "Failed to allocate picture buffer", PLATFORM_FAILURE,);

    bool inserted = output_picture_buffers_.insert(std::make_pair(
        buffers[buffer_index].id(), picture_buffer)).second;
    DCHECK(inserted);
  }
  ProcessPendingSamples();
  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}

void DXVAVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(CalledOnValidThread());

  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  if (output_picture_buffers_.empty() && stale_output_picture_buffers_.empty())
    return;

  OutputBuffers::iterator it = output_picture_buffers_.find(picture_buffer_id);
  // If we didn't find the picture id in the |output_picture_buffers_| map, we
  // try the |stale_output_picture_buffers_| map, as this may have been an
  // output picture buffer from before a resolution change that had yet to be
  // displayed when the resolution changed. The client is calling us back to
  // tell us that we can now recycle this picture buffer, so if we were waiting
  // to dispose of it we now can.
  if (it == output_picture_buffers_.end()) {
    it = stale_output_picture_buffers_.find(picture_buffer_id);
    RETURN_AND_NOTIFY_ON_FAILURE(it != stale_output_picture_buffers_.end(),
        "Invalid picture id: " << picture_buffer_id, INVALID_ARGUMENT,);
    base::MessageLoop::current()->PostTask(FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer,
            weak_this_factory_.GetWeakPtr(), picture_buffer_id));
    return;
  }

  it->second->ReusePictureBuffer();
  ProcessPendingSamples();

  if (state_ == kFlushing && pending_output_samples_.empty())
    FlushInternal();
}

void DXVAVideoDecodeAccelerator::Flush() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Flush";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Unexpected decoder state: " << state_, ILLEGAL_STATE,);

  state_ = kFlushing;

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0),
      "Failed to send drain message", PLATFORM_FAILURE,);

  if (!pending_output_samples_.empty())
    return;

  FlushInternal();
}

void DXVAVideoDecodeAccelerator::Reset() {
  DCHECK(CalledOnValidThread());

  DVLOG(1) << "DXVAVideoDecodeAccelerator::Reset";

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kStopped),
      "Reset: invalid state: " << state_, ILLEGAL_STATE,);

  state_ = kResetting;

  pending_output_samples_.clear();

  NotifyInputBuffersDropped();

  RETURN_AND_NOTIFY_ON_FAILURE(SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0),
      "Reset: Failed to send message.", PLATFORM_FAILURE,);

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyResetDone,
                 weak_this_factory_.GetWeakPtr()));

  state_ = DXVAVideoDecodeAccelerator::kNormal;
}

void DXVAVideoDecodeAccelerator::Destroy() {
  DCHECK(CalledOnValidThread());
  Invalidate();
  delete this;
}

bool DXVAVideoDecodeAccelerator::CanDecodeOnIOThread() {
  return false;
}

bool DXVAVideoDecodeAccelerator::InitDecoder(media::VideoCodecProfile profile) {
  if (profile < media::H264PROFILE_MIN || profile > media::H264PROFILE_MAX)
    return false;

  // We mimic the steps CoCreateInstance uses to instantiate the object. This
  // was previously done because it failed inside the sandbox, and now is done
  // as a more minimal approach to avoid other side-effects CCI might have (as
  // we are still in a reduced sandbox).
  HMODULE decoder_dll = ::LoadLibrary(L"msmpeg2vdec.dll");
  RETURN_ON_FAILURE(decoder_dll,
                    "msmpeg2vdec.dll required for decoding is not loaded",
                    false);

  // Check version of DLL, version 6.7.7140 is blacklisted due to high crash
  // rates in browsers loading that DLL. If that is the version installed we
  // fall back to software decoding. See crbug/403440.
  FileVersionInfo* version_info =
      FileVersionInfo::CreateFileVersionInfoForModule(decoder_dll);
  RETURN_ON_FAILURE(version_info,
                    "unable to get version of msmpeg2vdec.dll",
                    false);
  base::string16 file_version = version_info->file_version();
  RETURN_ON_FAILURE(file_version.find(L"6.1.7140") == base::string16::npos,
                    "blacklisted version of msmpeg2vdec.dll 6.7.7140",
                    false);

  typedef HRESULT(WINAPI * GetClassObject)(
      const CLSID & clsid, const IID & iid, void * *object);

  GetClassObject get_class_object = reinterpret_cast<GetClassObject>(
      GetProcAddress(decoder_dll, "DllGetClassObject"));
  RETURN_ON_FAILURE(
      get_class_object, "Failed to get DllGetClassObject pointer", false);

  base::win::ScopedComPtr<IClassFactory> factory;
  HRESULT hr = get_class_object(__uuidof(CMSH264DecoderMFT),
                                __uuidof(IClassFactory),
                                reinterpret_cast<void**>(factory.Receive()));
  RETURN_ON_HR_FAILURE(hr, "DllGetClassObject for decoder failed", false);

  hr = factory->CreateInstance(NULL,
                               __uuidof(IMFTransform),
                               reinterpret_cast<void**>(decoder_.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to create decoder instance", false);

  RETURN_ON_FAILURE(CheckDecoderDxvaSupport(),
                    "Failed to check decoder DXVA support", false);

  hr = decoder_->ProcessMessage(
            MFT_MESSAGE_SET_D3D_MANAGER,
            reinterpret_cast<ULONG_PTR>(device_manager_.get()));
  RETURN_ON_HR_FAILURE(hr, "Failed to pass D3D manager to decoder", false);

  EGLDisplay egl_display = gfx::GLSurfaceEGL::GetHardwareDisplay();

  EGLint config_attribs[] = {
    EGL_BUFFER_SIZE, 32,
    EGL_RED_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_BLUE_SIZE, 8,
    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
    EGL_ALPHA_SIZE, 0,
    EGL_NONE
  };

  EGLint num_configs;

  if (!eglChooseConfig(
      egl_display,
      config_attribs,
      &egl_config_,
      1,
      &num_configs))
    return false;

  return SetDecoderMediaTypes();
}

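// Verifies that the decoder MFT is D3D-aware and enables DXVA hardware
// accelerated H.264 decoding on it.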
bool DXVAVideoDecodeAccelerator::CheckDecoderDxvaSupport() {
  base::win::ScopedComPtr<IMFAttributes> attributes;
  HRESULT hr = decoder_->GetAttributes(attributes.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder attributes", false);

  UINT32 dxva = 0;
  hr = attributes->GetUINT32(MF_SA_D3D_AWARE, &dxva);
  RETURN_ON_HR_FAILURE(hr, "Failed to check if decoder supports DXVA", false);

  hr = attributes->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
  RETURN_ON_HR_FAILURE(hr, "Failed to enable DXVA H/W decoding", false);
  return true;
}

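// Configures the decoder MFT with an H.264 input media type and an NV12
// output media type.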
bool DXVAVideoDecodeAccelerator::SetDecoderMediaTypes() {
  RETURN_ON_FAILURE(SetDecoderInputMediaType(),
                    "Failed to set decoder input media type", false);
  return SetDecoderOutputMediaType(MFVideoFormat_NV12);
}

bool DXVAVideoDecodeAccelerator::SetDecoderInputMediaType() {
  base::win::ScopedComPtr<IMFMediaType> media_type;
  HRESULT hr = MFCreateMediaType(media_type.Receive());
  RETURN_ON_HR_FAILURE(hr, "MFCreateMediaType failed", false);

  hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  RETURN_ON_HR_FAILURE(hr, "Failed to set major input type", false);

  hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  RETURN_ON_HR_FAILURE(hr, "Failed to set subtype", false);

  // Not sure about this. msdn recommends setting this value on the input
  // media type.
  hr = media_type->SetUINT32(MF_MT_INTERLACE_MODE,
                             MFVideoInterlace_MixedInterlaceOrProgressive);
  RETURN_ON_HR_FAILURE(hr, "Failed to set interlace mode", false);

  hr = decoder_->SetInputType(0, media_type, 0);  // No flags
  RETURN_ON_HR_FAILURE(hr, "Failed to set decoder input type", false);
  return true;
}

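// Walks the decoder's available output media types and selects the first one
// whose subtype matches |subtype|.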
bool DXVAVideoDecodeAccelerator::SetDecoderOutputMediaType(
    const GUID& subtype) {
  base::win::ScopedComPtr<IMFMediaType> out_media_type;

  for (uint32 i = 0;
       SUCCEEDED(decoder_->GetOutputAvailableType(0, i,
                                                  out_media_type.Receive()));
       ++i) {
    GUID out_subtype = {0};
    HRESULT hr = out_media_type->GetGUID(MF_MT_SUBTYPE, &out_subtype);
    RETURN_ON_HR_FAILURE(hr, "Failed to get output major type", false);

    if (out_subtype == subtype) {
      hr = decoder_->SetOutputType(0, out_media_type, 0);  // No flags
      RETURN_ON_HR_FAILURE(hr, "Failed to set decoder output type", false);
      return true;
    }
    out_media_type.Release();
  }
  return false;
}

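// Sends a control message to the decoder MFT and reports whether it was
// accepted.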
bool DXVAVideoDecodeAccelerator::SendMFTMessage(MFT_MESSAGE_TYPE msg,
                                                int32 param) {
  HRESULT hr = decoder_->ProcessMessage(msg, param);
  return SUCCEEDED(hr);
}

// Gets the minimum buffer sizes for input and output samples. The MFT will not
// allocate buffers for either input or output, so we have to do it ourselves
// and make sure they are the correct size. We only provide decoding if DXVA is
// enabled.
bool DXVAVideoDecodeAccelerator::GetStreamsInfoAndBufferReqs() {
  HRESULT hr = decoder_->GetInputStreamInfo(0, &input_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get input stream info", false);

  hr = decoder_->GetOutputStreamInfo(0, &output_stream_info_);
  RETURN_ON_HR_FAILURE(hr, "Failed to get decoder output stream info", false);

  DVLOG(1) << "Input stream info: ";
  DVLOG(1) << "Max latency: " << input_stream_info_.hnsMaxLatency;
  // There should be three flags: one requiring that a whole frame fit in a
  // single sample, one requiring that there be only one buffer in a sample,
  // and one specifying a fixed sample size (cbSize).
  CHECK_EQ(input_stream_info_.dwFlags, 0x7u);

  DVLOG(1) << "Min buffer size: " << input_stream_info_.cbSize;
  DVLOG(1) << "Max lookahead: " << input_stream_info_.cbMaxLookahead;
  DVLOG(1) << "Alignment: " << input_stream_info_.cbAlignment;

  DVLOG(1) << "Output stream info: ";
  // The output flags should be the same and mean the same thing, except that
  // when DXVA is enabled there is an extra 0x100 flag meaning the decoder will
  // allocate its own output samples.
  DVLOG(1) << "Flags: "
          << std::hex << std::showbase << output_stream_info_.dwFlags;
  CHECK_EQ(output_stream_info_.dwFlags, 0x107u);
  DVLOG(1) << "Min buffer size: " << output_stream_info_.cbSize;
  DVLOG(1) << "Alignment: " << output_stream_info_.cbAlignment;
  return true;
}

void DXVAVideoDecodeAccelerator::DoDecode() {
  // This function is also called from FlushInternal in a loop which could
  // result in the state transitioning to kStopped due to no decoded output.
  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kNormal || state_ == kFlushing ||
                                state_ == kStopped),
      "DoDecode: not in normal/flushing/stopped state", ILLEGAL_STATE,);

  MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0};
  DWORD status = 0;

  HRESULT hr = decoder_->ProcessOutput(0,  // No flags
                                       1,  // # of out streams to pull from
                                       &output_data_buffer,
                                       &status);
  IMFCollection* events = output_data_buffer.pEvents;
  if (events != NULL) {
    VLOG(1) << "Got events from ProcessOutput, but discarding";
    events->Release();
  }
  if (FAILED(hr)) {
    // A stream change needs further ProcessInput calls to get back decoder
    // output, which is why we need to set the state to stopped.
    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      if (!SetDecoderOutputMediaType(MFVideoFormat_NV12)) {
        // Decoder didn't let us set NV12 output format. Not sure as to why
        // this can happen. Give up in disgust.
        NOTREACHED() << "Failed to set decoder output media type to NV12";
        state_ = kStopped;
      } else {
        DVLOG(1) << "Received output format change from the decoder."
                    " Recursively invoking DoDecode";
        DoDecode();
      }
      return;
    } else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      // No more output from the decoder. Stop playback.
      state_ = kStopped;
      return;
    } else {
      NOTREACHED() << "Unhandled error in DoDecode()";
      return;
    }
  }
  TRACE_EVENT_END_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");

  TRACE_COUNTER1("DXVA Decoding", "TotalPacketsBeforeDecode",
                 inputs_before_decode_);

  inputs_before_decode_ = 0;

  RETURN_AND_NOTIFY_ON_FAILURE(ProcessOutputSample(output_data_buffer.pSample),
      "Failed to process output sample.", PLATFORM_FAILURE,);
}

bool DXVAVideoDecodeAccelerator::ProcessOutputSample(IMFSample* sample) {
  RETURN_ON_FAILURE(sample, "Decode succeeded with NULL output sample", false);

  base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
  HRESULT hr = sample->GetBufferByIndex(0, output_buffer.Receive());
  RETURN_ON_HR_FAILURE(hr, "Failed to get buffer from output sample", false);

  base::win::ScopedComPtr<IDirect3DSurface9> surface;
  hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                    IID_PPV_ARGS(surface.Receive()));
  RETURN_ON_HR_FAILURE(hr, "Failed to get D3D surface from output sample",
                       false);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",
                       false);

  pending_output_samples_.push_back(
      PendingSampleInfo(input_buffer_id, sample));

  // If we have available picture buffers to copy the output data then use the
  // first one and then flag it as not being available for use.
  if (output_picture_buffers_.size()) {
    ProcessPendingSamples();
    return true;
  }
  if (pictures_requested_) {
    DVLOG(1) << "Waiting for picture slots from the client.";
    return true;
  }

  // We only read the surface description, which contains its width/height when
  // we need the picture buffers from the client. Once we have those, then they
  // are reused.
  D3DSURFACE_DESC surface_desc;
  hr = surface->GetDesc(&surface_desc);
  RETURN_ON_HR_FAILURE(hr, "Failed to get surface description", false);

  // Go ahead and request picture buffers.
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 surface_desc.Width,
                 surface_desc.Height));

  pictures_requested_ = true;
  return true;
}

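// Copies pending decoded output samples into available picture buffers and
// posts NotifyPictureReady for each copied sample. Once all pending output
// has been consumed, any queued input buffers are scheduled for decoding.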
void DXVAVideoDecodeAccelerator::ProcessPendingSamples() {
  RETURN_AND_NOTIFY_ON_FAILURE(make_context_current_.Run(),
      "Failed to make context current", PLATFORM_FAILURE,);

  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end() &&
       !pending_output_samples_.empty();
       ++index) {
    if (index->second->available()) {
      PendingSampleInfo sample_info = pending_output_samples_.front();

      base::win::ScopedComPtr<IMFMediaBuffer> output_buffer;
      HRESULT hr = sample_info.output_sample->GetBufferByIndex(
          0, output_buffer.Receive());
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get buffer from output sample", PLATFORM_FAILURE,);

      base::win::ScopedComPtr<IDirect3DSurface9> surface;
      hr = MFGetService(output_buffer, MR_BUFFER_SERVICE,
                        IID_PPV_ARGS(surface.Receive()));
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get D3D surface from output sample",
          PLATFORM_FAILURE,);

      D3DSURFACE_DESC surface_desc;
      hr = surface->GetDesc(&surface_desc);
      RETURN_AND_NOTIFY_ON_HR_FAILURE(
          hr, "Failed to get surface description", PLATFORM_FAILURE,);

      if (surface_desc.Width !=
              static_cast<uint32>(index->second->size().width()) ||
          surface_desc.Height !=
              static_cast<uint32>(index->second->size().height())) {
        HandleResolutionChanged(surface_desc.Width, surface_desc.Height);
        return;
      }

      RETURN_AND_NOTIFY_ON_FAILURE(
          index->second->CopyOutputSampleDataToPictureBuffer(*this, surface),
          "Failed to copy output sample",
          PLATFORM_FAILURE, );

      media::Picture output_picture(index->second->id(),
                                    sample_info.input_buffer_id,
                                    gfx::Rect(index->second->size()));
      base::MessageLoop::current()->PostTask(
          FROM_HERE,
          base::Bind(&DXVAVideoDecodeAccelerator::NotifyPictureReady,
                     weak_this_factory_.GetWeakPtr(),
                     output_picture));

      index->second->set_available(false);
      pending_output_samples_.pop_front();
    }
  }

  if (!pending_input_buffers_.empty() && pending_output_samples_.empty()) {
    base::MessageLoop::current()->PostTask(
        FROM_HERE,
        base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                   weak_this_factory_.GetWeakPtr()));
  }
}

void DXVAVideoDecodeAccelerator::StopOnError(
  media::VideoDecodeAccelerator::Error error) {
  DCHECK(CalledOnValidThread());

  if (client_)
    client_->NotifyError(error);
  client_ = NULL;

  if (state_ != kUninitialized) {
    Invalidate();
  }
}

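// Releases all decoder resources (picture buffers, pending samples, the MFT
// and Media Foundation itself) and returns to the uninitialized state.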
void DXVAVideoDecodeAccelerator::Invalidate() {
  if (state_ == kUninitialized)
    return;
  weak_this_factory_.InvalidateWeakPtrs();
  output_picture_buffers_.clear();
  stale_output_picture_buffers_.clear();
  pending_output_samples_.clear();
  pending_input_buffers_.clear();
  decoder_.Release();
  MFShutdown();
  state_ = kUninitialized;
}

void DXVAVideoDecodeAccelerator::NotifyInputBufferRead(int input_buffer_id) {
  if (client_)
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}

void DXVAVideoDecodeAccelerator::NotifyFlushDone() {
  if (client_)
    client_->NotifyFlushDone();
}

void DXVAVideoDecodeAccelerator::NotifyResetDone() {
  if (client_)
    client_->NotifyResetDone();
}

void DXVAVideoDecodeAccelerator::RequestPictureBuffers(int width, int height) {
  // This task could execute after the decoder has been torn down.
  if (state_ != kUninitialized && client_) {
    client_->ProvidePictureBuffers(
        kNumPictureBuffers,
        gfx::Size(width, height),
        GL_TEXTURE_2D);
  }
}

void DXVAVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  // This task could execute after the decoder has been torn down.
  if (state_ != kUninitialized && client_)
    client_->PictureReady(picture);
}

void DXVAVideoDecodeAccelerator::NotifyInputBuffersDropped() {
  if (!client_ || !pending_output_samples_.empty())
    return;

  for (PendingInputs::iterator it = pending_input_buffers_.begin();
       it != pending_input_buffers_.end(); ++it) {
    LONGLONG input_buffer_id = 0;
    RETURN_ON_HR_FAILURE((*it)->GetSampleTime(&input_buffer_id),
                         "Failed to get buffer id associated with sample",);
    client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
  }
  pending_input_buffers_.clear();
}

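// Feeds any queued input samples to the decoder. This only runs once no
// output samples are pending, since a pending output blocks further input.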
void DXVAVideoDecodeAccelerator::DecodePendingInputBuffers() {
  RETURN_AND_NOTIFY_ON_FAILURE((state_ != kUninitialized),
      "Invalid state: " << state_, ILLEGAL_STATE,);

  if (pending_input_buffers_.empty() || !pending_output_samples_.empty())
    return;

  PendingInputs pending_input_buffers_copy;
  std::swap(pending_input_buffers_, pending_input_buffers_copy);

  for (PendingInputs::iterator it = pending_input_buffers_copy.begin();
       it != pending_input_buffers_copy.end(); ++it) {
    DecodeInternal(*it);
  }
}

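// Drains the remaining output from the decoder and notifies the client once
// the flush has completed.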
void DXVAVideoDecodeAccelerator::FlushInternal() {
  // The DoDecode function sets the state to kStopped when the decoder returns
  // MF_E_TRANSFORM_NEED_MORE_INPUT.
  // The MFT decoder can buffer up to 30 frames worth of input before returning
  // an output frame. This loop here attempts to retrieve as many output frames
  // as possible from the buffered set.
  while (state_ != kStopped) {
    DoDecode();
    if (!pending_output_samples_.empty())
      return;
  }

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyFlushDone,
                 weak_this_factory_.GetWeakPtr()));

  state_ = kNormal;
}

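// Submits |sample| to the decoder, pulling output as needed when the decoder
// reports MF_E_NOTACCEPTING, and notifies the client once the input buffer
// has been consumed.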
void DXVAVideoDecodeAccelerator::DecodeInternal(
    const base::win::ScopedComPtr<IMFSample>& sample) {
  DCHECK(CalledOnValidThread());

  if (state_ == kUninitialized)
    return;

  if (!pending_output_samples_.empty() || !pending_input_buffers_.empty()) {
    pending_input_buffers_.push_back(sample);
    return;
  }

  if (!inputs_before_decode_) {
    TRACE_EVENT_BEGIN_ETW("DXVAVideoDecodeAccelerator.Decoding", this, "");
  }
  inputs_before_decode_++;

  HRESULT hr = decoder_->ProcessInput(0, sample, 0);
  // Per MSDN, if the decoder returns MF_E_NOTACCEPTING it means that it has
  // enough data to produce one or more output samples. In this case the
  // recommended options are to:
  // 1. Generate new output by calling IMFTransform::ProcessOutput until it
  //    returns MF_E_TRANSFORM_NEED_MORE_INPUT.
  // 2. Flush the input data.
  // We implement the first option, i.e. we retrieve the output sample and then
  // process the input again. Failure in either of these steps is treated as a
  // decoder failure.
  if (hr == MF_E_NOTACCEPTING) {
    DoDecode();
    RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
        "Failed to process output. Unexpected decoder state: " << state_,
        PLATFORM_FAILURE,);
    hr = decoder_->ProcessInput(0, sample, 0);
    // If we continue to get the MF_E_NOTACCEPTING error we do the following:
    // 1. Add the input sample to the pending queue.
    // 2. If we don't have any output samples we post the
    //    DecodePendingInputBuffers task to process the pending input samples.
    //    If we have an output sample then the above task is posted when the
    //    output samples are sent to the client.
    // This is because we only support one pending output sample at any given
    // time, due to a limitation of the Microsoft Media Foundation decoder,
    // which recycles its output decoder surfaces.
    if (hr == MF_E_NOTACCEPTING) {
      pending_input_buffers_.push_back(sample);
      if (pending_output_samples_.empty()) {
        base::MessageLoop::current()->PostTask(
            FROM_HERE,
            base::Bind(&DXVAVideoDecodeAccelerator::DecodePendingInputBuffers,
                       weak_this_factory_.GetWeakPtr()));
      }
      return;
    }
  }
  RETURN_AND_NOTIFY_ON_HR_FAILURE(hr, "Failed to process input sample",
      PLATFORM_FAILURE,);

  DoDecode();

  RETURN_AND_NOTIFY_ON_FAILURE((state_ == kStopped || state_ == kNormal),
      "Failed to process output. Unexpected decoder state: " << state_,
      ILLEGAL_STATE,);

  LONGLONG input_buffer_id = 0;
  RETURN_ON_HR_FAILURE(sample->GetSampleTime(&input_buffer_id),
                       "Failed to get input buffer id associated with sample",);
  // The Microsoft Media Foundation decoder internally buffers up to 30 frames
  // before returning a decoded frame. We need to inform the client that this
  // input buffer is processed as it may stop sending us further input.
  // Note: This may break clients which expect every input buffer to be
  // associated with a decoded output buffer.
  // TODO(ananta)
  // Do some more investigation into whether it is possible to get the MFT
  // decoder to emit an output packet for every input packet.
  // http://code.google.com/p/chromium/issues/detail?id=108121
  // http://code.google.com/p/chromium/issues/detail?id=150925
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::NotifyInputBufferRead,
                 weak_this_factory_.GetWeakPtr(),
                 input_buffer_id));
}

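// Handles an output resolution change by dismissing the existing picture
// buffers and requesting a new set at the new dimensions.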
void DXVAVideoDecodeAccelerator::HandleResolutionChanged(int width,
                                                         int height) {
  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::DismissStaleBuffers,
                 weak_this_factory_.GetWeakPtr()));

  base::MessageLoop::current()->PostTask(
      FROM_HERE,
      base::Bind(&DXVAVideoDecodeAccelerator::RequestPictureBuffers,
                 weak_this_factory_.GetWeakPtr(),
                 width,
                 height));
}

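// Dismisses picture buffers that are currently available; buffers still held
// by the client are moved to |stale_output_picture_buffers_| for deferred
// dismissal once the client returns them.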
void DXVAVideoDecodeAccelerator::DismissStaleBuffers() {
  OutputBuffers::iterator index;

  for (index = output_picture_buffers_.begin();
       index != output_picture_buffers_.end();
       ++index) {
    if (index->second->available()) {
      DVLOG(1) << "Dismissing picture id: " << index->second->id();
      client_->DismissPictureBuffer(index->second->id());
    } else {
      // Move to |stale_output_picture_buffers_| for deferred deletion.
      stale_output_picture_buffers_.insert(
          std::make_pair(index->first, index->second));
    }
  }

  output_picture_buffers_.clear();
}

void DXVAVideoDecodeAccelerator::DeferredDismissStaleBuffer(
    int32 picture_buffer_id) {
  OutputBuffers::iterator it = stale_output_picture_buffers_.find(
      picture_buffer_id);
  DCHECK(it != stale_output_picture_buffers_.end());
  DVLOG(1) << "Dismissing picture id: " << it->second->id();
  client_->DismissPictureBuffer(it->second->id());
  stale_output_picture_buffers_.erase(it);
}

}  // namespace content