core_audio_util_win.h revision 3551c9c881056c480085172ff9840cab31610854
1// Copyright (c) 2012 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5// Utility methods for the Core Audio API on Windows.
6// Always ensure that Core Audio is supported before using these methods.
7// Use media::CoreAudioIsSupported() for this purpose.
8// Also, all methods must be called on a valid COM thread. This can be done
9// by using the base::win::ScopedCOMInitializer helper class.
10
11#ifndef MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
12#define MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
13
14#include <audioclient.h>
15#include <mmdeviceapi.h>
16#include <string>
17
18#include "base/basictypes.h"
19#include "base/time/time.h"
20#include "base/win/scoped_comptr.h"
21#include "media/audio/audio_device_name.h"
22#include "media/audio/audio_parameters.h"
23#include "media/base/media_export.h"
24
25using base::win::ScopedComPtr;
26
27namespace media {
28
class MEDIA_EXPORT CoreAudioUtil {
 public:
  // Returns true if Windows Core Audio is supported.
  // Always verify that this method returns true before using any of the
  // methods in this class.
  static bool IsSupported();

  // Converts a REFERENCE_TIME value to a base::TimeDelta.
  // One reference-time unit is 100 nanoseconds.
  // Example: double s = RefererenceTimeToTimeDelta(t).InMillisecondsF();
  // NOTE(review): the method name misspells "Reference"; renaming it would
  // break existing callers, so the typo is only documented here.
  static base::TimeDelta RefererenceTimeToTimeDelta(REFERENCE_TIME time);

  // Returns AUDCLNT_SHAREMODE_EXCLUSIVE if --enable-exclusive-mode is used
  // as command-line flag and AUDCLNT_SHAREMODE_SHARED otherwise (default).
  static AUDCLNT_SHAREMODE GetShareMode();

  // The Windows Multimedia Device (MMDevice) API enables audio clients to
  // discover audio endpoint devices and determine their capabilities.

  // Returns the number of active audio devices in the specified data-flow
  // direction. Set |data_flow| to eAll to retrieve the total number of
  // active audio devices.
  static int NumberOfActiveDevices(EDataFlow data_flow);

  // Creates an IMMDeviceEnumerator interface which provides methods for
  // enumerating audio endpoint devices.
  static ScopedComPtr<IMMDeviceEnumerator> CreateDeviceEnumerator();

  // Creates a default endpoint device that is specified by a data-flow
  // direction and role, e.g. default render device.
  static ScopedComPtr<IMMDevice> CreateDefaultDevice(
      EDataFlow data_flow, ERole role);

  // Creates an endpoint device that is specified by a unique endpoint device-
  // identification string.
  static ScopedComPtr<IMMDevice> CreateDevice(const std::string& device_id);

  // Returns the unique ID and user-friendly name of a given endpoint device.
  // Example: "{0.0.1.00000000}.{8db6020f-18e3-4f25-b6f5-7726c9122574}", and
  //          "Microphone (Realtek High Definition Audio)".
  static HRESULT GetDeviceName(IMMDevice* device, AudioDeviceName* name);

  // Returns the device ID/path of the controller (a.k.a. the physical device)
  // that |device| is connected to.  This ID will be the same for all devices
  // from the same controller so it is useful for doing things like determining
  // whether a set of output and input devices belong to the same controller.
  // The device enumerator is required as well as the device itself since
  // looking at the device topology is required and we need to open up
  // associated devices to determine the controller id.
  // If the ID could not be determined for some reason, an empty string is
  // returned.
  static std::string GetAudioControllerID(IMMDevice* device,
      IMMDeviceEnumerator* enumerator);

  // Gets the user-friendly name of the endpoint device which is represented
  // by a unique id in |device_id|.
  static std::string GetFriendlyName(const std::string& device_id);

  // Returns true if the provided unique |device_id| corresponds to the
  // current default device for the given data-flow direction and role.
  static bool DeviceIsDefault(
      EDataFlow flow, ERole role, const std::string& device_id);

  // Queries whether the audio device is a rendering device or a capture
  // device.
  static EDataFlow GetDataFlow(IMMDevice* device);

  // The Windows Audio Session API (WASAPI) enables client applications to
  // manage the flow of audio data between the application and an audio endpoint
  // device.

  // Create an IAudioClient interface for the default IMMDevice where
  // flow direction and role are defined by |data_flow| and |role|.
  // The IAudioClient interface enables a client to create and initialize an
  // audio stream between an audio application and the audio engine (for a
  // shared-mode stream) or the hardware buffer of an audio endpoint device
  // (for an exclusive-mode stream).
  static ScopedComPtr<IAudioClient> CreateDefaultClient(EDataFlow data_flow,
                                                        ERole role);

  // Create an IAudioClient interface for an existing IMMDevice given by
  // |audio_device|. Flow direction and role are defined by |audio_device|.
  static ScopedComPtr<IAudioClient> CreateClient(IMMDevice* audio_device);

  // Get the mix format that the audio engine uses internally for processing
  // of shared-mode streams. This format is not necessarily a format that the
  // audio endpoint device supports. Thus, the caller might not succeed in
  // creating an exclusive-mode stream with a format obtained by this method.
  static HRESULT GetSharedModeMixFormat(IAudioClient* client,
                                        WAVEFORMATPCMEX* format);

  // Get the mix format that the audio engine uses internally for processing
  // of shared-mode streams using the default IMMDevice where flow direction
  // and role are defined by |data_flow| and |role|.
  static HRESULT GetDefaultSharedModeMixFormat(EDataFlow data_flow,
                                               ERole role,
                                               WAVEFORMATPCMEX* format);

  // Returns true if the specified |client| supports the format in |format|
  // for the given |share_mode| (shared or exclusive).
  static bool IsFormatSupported(IAudioClient* client,
                                AUDCLNT_SHAREMODE share_mode,
                                const WAVEFORMATPCMEX* format);

  // Returns true if the specified |channel_layout| is supported for the
  // default IMMDevice where flow direction and role are defined by
  // |data_flow| and |role|. If this method returns true for a certain
  // channel layout, it means that SharedModeInitialize() will succeed using
  // a format based on the preferred format where the channel layout has been
  // modified.
  static bool IsChannelLayoutSupported(EDataFlow data_flow, ERole role,
                                       ChannelLayout channel_layout);

  // For a shared-mode stream, the audio engine periodically processes the
  // data in the endpoint buffer at the period obtained in |device_period|.
  // For an exclusive mode stream, |device_period| corresponds to the minimum
  // time interval between successive processing by the endpoint device.
  // This period plus the stream latency between the buffer and endpoint device
  // represents the minimum possible latency that an audio application can
  // achieve. The time in |device_period| is expressed in 100-nanosecond units.
  static HRESULT GetDevicePeriod(IAudioClient* client,
                                 AUDCLNT_SHAREMODE share_mode,
                                 REFERENCE_TIME* device_period);

  // Get the preferred audio parameters for the specified |client|, for the
  // default device whose direction and role are defined by |data_flow| and
  // |role|, or for the device with the unique id given by |device_id|.
  // The acquired values should only be utilized for shared-mode streams since
  // there are no preferred settings for an exclusive-mode stream.
  static HRESULT GetPreferredAudioParameters(IAudioClient* client,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(EDataFlow data_flow, ERole role,
                                             AudioParameters* params);
  static HRESULT GetPreferredAudioParameters(const std::string& device_id,
                                             AudioParameters* params);

  // After activating an IAudioClient interface on an audio endpoint device,
  // the client must initialize it once, and only once, to initialize the audio
  // stream between the client and the device. In shared mode, the client
  // connects indirectly through the audio engine which does the mixing.
  // In exclusive mode, the client connects directly to the audio hardware.
  // If a valid event is provided in |event_handle|, the client will be
  // initialized for event-driven buffer handling. If |event_handle| is set to
  // NULL, event-driven buffer handling is not utilized.
  // NOTE(review): presumably the endpoint buffer size (in audio frames) is
  // returned in |endpoint_buffer_size| on success — confirm against the
  // implementation.
  static HRESULT SharedModeInitialize(IAudioClient* client,
                                      const WAVEFORMATPCMEX* format,
                                      HANDLE event_handle,
                                      uint32* endpoint_buffer_size);
  // TODO(henrika): add ExclusiveModeInitialize(...)

  // Create an IAudioRenderClient client for an existing IAudioClient given by
  // |client|. The IAudioRenderClient interface enables a client to write
  // output data to a rendering endpoint buffer.
  static ScopedComPtr<IAudioRenderClient> CreateRenderClient(
      IAudioClient* client);

  // Create an IAudioCaptureClient client for an existing IAudioClient given by
  // |client|. The IAudioCaptureClient interface enables a client to read
  // input data from a capture endpoint buffer.
  static ScopedComPtr<IAudioCaptureClient> CreateCaptureClient(
      IAudioClient* client);

  // Fills up the endpoint rendering buffer with silence for an existing
  // IAudioClient given by |client| and a corresponding IAudioRenderClient
  // given by |render_client|.
  static bool FillRenderEndpointBufferWithSilence(
      IAudioClient* client, IAudioRenderClient* render_client);

 private:
  // Static-utility class: construction, destruction, copying, and assignment
  // are all disallowed.
  CoreAudioUtil() {}
  ~CoreAudioUtil() {}
  DISALLOW_COPY_AND_ASSIGN(CoreAudioUtil);
};
200
201}  // namespace media
202
203#endif  // MEDIA_AUDIO_WIN_CORE_AUDIO_UTIL_WIN_H_
204