Searched defs:audio (Results 1 - 25 of 28) sorted by relevance

/frameworks/base/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/
SimTonesTest.java
17 package com.android.mediaframeworktest.functional.audio;
MediaBassBoostTest.java
17 package com.android.mediaframeworktest.functional.audio;
MediaPresetReverbTest.java
17 package com.android.mediaframeworktest.functional.audio;
210 // creating a volume controller on output mix ensures that ro.audio.silent mutes
211 // audio after the effects and not before
281 // creating a volume controller on output mix ensures that ro.audio.silent mutes
282 // audio after the effects and not before
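The repeated comment explains why these tests attach an effect to audio session 0, the output mix: with ro.audio.silent set, muting then takes effect after the effect chain rather than before it. A minimal sketch of routing a player through an auxiliary effect on the output mix using the public android.media.audiofx API (class name, file path and preset are illustrative, not taken from the test):

    import android.media.MediaPlayer;
    import android.media.audiofx.PresetReverb;

    // Sketch only: auxiliary preset reverb on the output mix (audio session 0),
    // with a MediaPlayer routed through it.
    public class AuxReverbSketch {
        public static void play(String path) throws Exception {   // path is a placeholder
            PresetReverb reverb = new PresetReverb(0 /* priority */, 0 /* output mix */);
            reverb.setPreset(PresetReverb.PRESET_SMALLROOM);
            reverb.setEnabled(true);

            MediaPlayer mp = new MediaPlayer();
            mp.setDataSource(path);
            mp.prepare();
            mp.attachAuxEffect(reverb.getId());     // send this player's audio to the reverb
            mp.setAuxEffectSendLevel(1.0f);
            mp.start();
        }
    }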
MediaVirtualizerTest.java
17 package com.android.mediaframeworktest.functional.audio;
MediaAudioManagerTest.java
17 package com.android.mediaframeworktest.functional.audio;
155 // the audio focus request is async, so wait a bit to verify it had the expected effect
175 //Test case 1: test audio focus listener loses audio focus:
186 //Test case 2: test audio focus listener loses audio focus:
198 //Test case 3: test audio focus listener loses audio focus:
210 //Test case 4: test audio focus registering and use over 3000 iterations
220 assertTrue("audio focu
[all...]
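MediaAudioManagerTest exercises requestAudioFocus()/abandonAudioFocus(); as the line-155 comment notes, focus changes are delivered asynchronously to the listener. A hedged sketch of the client-side calls being tested (class name and log tag are illustrative):

    import android.media.AudioManager;

    public class FocusRequestSketch {
        private final AudioManager.OnAudioFocusChangeListener mListener =
                focusChange -> android.util.Log.d("FocusSketch", "focus change: " + focusChange);

        public boolean requestFocus(AudioManager am) {
            // The return value only says the request was accepted; the listener is
            // invoked asynchronously when focus actually changes hands.
            int result = am.requestAudioFocus(mListener,
                    AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
            return result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
        }

        public void releaseFocus(AudioManager am) {
            am.abandonAudioFocus(mListener);
        }
    }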
MediaEnvReverbTest.java
17 package com.android.mediaframeworktest.functional.audio;
365 // creating a volume controller on output mix ensures that ro.audio.silent mutes
366 // audio after the effects and not before
438 // creating a volume controller on output mix ensures that ro.audio.silent mutes
439 // audio after the effects and not before
MediaEqualizerTest.java
17 package com.android.mediaframeworktest.functional.audio;
MediaVisualizerTest.java
17 package com.android.mediaframeworktest.functional.audio;
242 // creating a volume controller on output mix ensures that ro.audio.silent mutes
243 // audio after the effects and not before
321 // creating a volume controller on output mix ensures that ro.audio.silent mutes
322 // audio after the effects and not before
454 // creating a volume controller on output mix ensures that ro.audio.silent mutes
455 // audio after the effects and not before
MediaAudioEffectTest.java
17 package com.android.mediaframeworktest.functional.audio;
276 //Test case 1.4: test constructor on mediaPlayer audio session
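The "constructor on mediaPlayer audio session" case builds an effect bound to one player's session rather than the output mix. A sketch of that pattern, using Equalizer as a stand-in effect (names and path are placeholders):

    import android.media.MediaPlayer;
    import android.media.audiofx.Equalizer;

    public class SessionEffectSketch {
        public static void play(String path) throws Exception {   // path is a placeholder
            MediaPlayer mp = new MediaPlayer();
            mp.setDataSource(path);
            mp.prepare();
            // Bind the effect to this player's session rather than the global output mix.
            Equalizer eq = new Equalizer(0 /* priority */, mp.getAudioSessionId());
            eq.setEnabled(true);
            mp.start();
        }
    }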
MediaAudioTrackTest.java
17 package com.android.mediaframeworktest.functional.audio;
752 // count (given the audio track properties), and add 77.
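The line-752 comment derives a frame count from the track's minimum buffer size before padding it. A sketch of obtaining that minimum from the public AudioTrack API (sample rate and format are illustrative):

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    public class MinBufferSketch {
        public static AudioTrack createTrack() {
            int rate = 44100;                         // illustrative values
            int minBytes = AudioTrack.getMinBufferSize(rate,
                    AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
            // 16-bit stereo is 4 bytes per frame, so the minimum frame count is minBytes / 4.
            return new AudioTrack(AudioManager.STREAM_MUSIC, rate,
                    AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
                    minBytes, AudioTrack.MODE_STREAM);
        }
    }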
/frameworks/base/core/java/android/speech/tts/
UtteranceProgressListener.java
18 * be soon before audio is played back in the case of a {@link TextToSpeech#speak}
28 * All audio will have been played back by this point for audible output, and all
79 * Called when the TTS engine begins to synthesize the audio for a request.
87 * This is called when the TTS engine starts synthesizing audio for the request. If an
88 * application wishes to know when the audio is about to start playing, {@link #onStart(String)}
93 * @param sampleRateInHz Sample rate in hertz of the generated audio.
94 * @param audioFormat Audio format of the generated audio. Should be one of
103 * This is called when a chunk of audio is ready for consumption.
106 * The audio parameter is a copy of what will be synthesized to the speakers (when synthesis was
108 * {@link TextToSpeech#synthesizeToFile}). The audio byte
121 onAudioAvailable(String utteranceId, byte[] audio) argument
[all...]
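The callbacks above cover one utterance's lifecycle: onStart just before playback, onBeginSynthesis/onAudioAvailable while PCM is generated, onDone when output completes. A minimal listener sketch against the public TextToSpeech API (log tag and messages are illustrative):

    import android.speech.tts.TextToSpeech;
    import android.speech.tts.UtteranceProgressListener;
    import android.util.Log;

    public class ProgressListenerSketch extends UtteranceProgressListener {
        @Override public void onStart(String utteranceId) {
            Log.d("TtsSketch", "about to play " + utteranceId);
        }
        @Override public void onBeginSynthesis(String utteranceId, int sampleRateInHz,
                int audioFormat, int channelCount) {
            Log.d("TtsSketch", "synthesizing at " + sampleRateInHz + " Hz");
        }
        @Override public void onAudioAvailable(String utteranceId, byte[] audio) {
            Log.d("TtsSketch", audio.length + " bytes of synthesized audio");
        }
        @Override public void onDone(String utteranceId) {
            Log.d("TtsSketch", "playback finished for " + utteranceId);
        }
        @Override public void onError(String utteranceId) { }

        public static void attach(TextToSpeech tts) {
            tts.setOnUtteranceProgressListener(new ProgressListenerSketch());
        }
    }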
TextToSpeechService.java
112 // A thread and its associated handler for playing back any audio
153 // Tell the audio playback thread to stop.
606 // items to the audio playback handler.
617 // Remove any enqueued audio too.
663 public void dispatchOnAudioAvailable(byte[] audio); argument
666 /** Set of parameters affecting audio output. */
669 * Audio session identifier. May be used to associate audio playback with one of the
682 * Left/right position of the audio, in the range [-1.0f, 1.0f].
877 public void dispatchOnAudioAvailable(byte[] audio) { argument
880 mCallbacks.dispatchOnAudioAvailable(getCallerIdentity(), utteranceId, audio);
[all...]
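On the engine side, audio pushed through the SynthesisCallback is what the service forwards to clients via dispatchOnAudioAvailable(). A skeleton engine sketch (language result, sample rate and the silence buffer are stand-in values, and the required manifest declaration is omitted):

    import android.media.AudioFormat;
    import android.speech.tts.SynthesisCallback;
    import android.speech.tts.SynthesisRequest;
    import android.speech.tts.TextToSpeech;
    import android.speech.tts.TextToSpeechService;

    public class SketchTtsService extends TextToSpeechService {
        @Override protected int onIsLanguageAvailable(String lang, String country, String variant) {
            return TextToSpeech.LANG_AVAILABLE;
        }
        @Override protected String[] onGetLanguage() { return new String[] {"eng", "USA", ""}; }
        @Override protected int onLoadLanguage(String lang, String country, String variant) {
            return TextToSpeech.LANG_AVAILABLE;
        }
        @Override protected void onStop() { }
        @Override protected void onSynthesizeText(SynthesisRequest request, SynthesisCallback callback) {
            // Every buffer handed to the callback is relayed to listeners as onAudioAvailable().
            callback.start(16000, AudioFormat.ENCODING_PCM_16BIT, 1 /* mono */);
            byte[] silence = new byte[3200];            // 100 ms of silence as stand-in audio
            callback.audioAvailable(silence, 0, silence.length);
            callback.done();
        }
    }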
/frameworks/base/services/core/java/com/android/server/audio/
RecordingActivityMonitor.java
17 package com.android.server.audio;
RotationHelper.java
17 package com.android.server.audio;
31 * to the audio HALs through AudioSystem.
38 * query the display rotation so audio stays in sync with video/dialogs. This is
99 // use display rotation so audio stays in sync with video/dialogs
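RotationHelper itself is server-side and not app-visible; purely to illustrate the idea of forwarding display rotation to the audio HAL, here is a sketch that watches rotation changes and sends a key/value parameter. AudioManager.setParameters() stands in for the internal AudioSystem call, and the "rotation" key is an assumption:

    import android.content.Context;
    import android.hardware.display.DisplayManager;
    import android.media.AudioManager;
    import android.view.Display;

    public class RotationForwardSketch implements DisplayManager.DisplayListener {
        private final Context mContext;

        public RotationForwardSketch(Context context) {
            mContext = context;
            context.getSystemService(DisplayManager.class)
                    .registerDisplayListener(this, null /* main thread handler */);
        }

        @Override public void onDisplayChanged(int displayId) {
            if (displayId != Display.DEFAULT_DISPLAY) return;
            Display display = mContext.getSystemService(DisplayManager.class).getDisplay(displayId);
            int degrees = display.getRotation() * 90;   // Surface.ROTATION_* -> degrees
            // Stand-in for the internal AudioSystem.setParameters() call; the key is assumed.
            mContext.getSystemService(AudioManager.class).setParameters("rotation=" + degrees);
        }
        @Override public void onDisplayAdded(int displayId) { }
        @Override public void onDisplayRemoved(int displayId) { }
    }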
FocusRequester.java
17 package com.android.server.audio;
27 import com.android.server.audio.MediaFocusControl.AudioFocusDeathHandler;
33 * Class to handle all the information about a user of audio focus. The lifecycle of each
34 * instance is managed by android.media.MediaFocusControl, from its addition to the audio focus
51 * the audio focus gain request that caused the addition of this object in the focus stack.
60 * the audio focus loss received by mFocusDispatcher, is AudioManager.AUDIOFOCUS_NONE if
65 * the audio attributes associated with the focus request
222 * For a given audio focus gain request, return the audio focus loss type that will result
225 * @return the audio focu
[all...]
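The line-222 comment maps an incoming focus gain request to the loss type delivered to the current focus holder. An illustrative mapping consistent with the public AudioManager constants (a sketch of the documented behavior, not the actual FocusRequester code):

    import android.media.AudioManager;

    public final class FocusLossSketch {
        // Illustrative mapping: the stronger the incoming gain request, the stronger
        // the loss reported to the focus owner being pre-empted.
        public static int lossForGainRequest(int gainRequest) {
            switch (gainRequest) {
                case AudioManager.AUDIOFOCUS_GAIN:
                    return AudioManager.AUDIOFOCUS_LOSS;
                case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
                case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
                    return AudioManager.AUDIOFOCUS_LOSS_TRANSIENT;
                case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
                    return AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK;
                default:
                    return AudioManager.AUDIOFOCUS_NONE;
            }
        }
    }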
MediaFocusControl.java
17 package com.android.server.audio;
69 * Discard the current audio focus owner.
70 * Notify top of audio focus stack that it lost focus (regardless of possibility to reassign
101 // going through the audio focus stack to signal new focus, traversing order doesn't
113 * Display in the log the current entries in the audio focus stack
176 // is the owner of the audio focus part of the client to remove?
223 * Push the focus requester onto the audio focus stack at the first position immediately
250 * Inner class to monitor audio focus client deaths, and remove them from the audio focus
268 * Indicates whether to notify an audio focu
[all...]
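MediaFocusControl walks the focus stack and notifies the displaced owner; on the client side that notification arrives in OnAudioFocusChangeListener. A hedged sketch of a typical reaction (the ducking volume and player field are illustrative):

    import android.media.AudioManager;
    import android.media.MediaPlayer;

    public class FocusLossHandlerSketch implements AudioManager.OnAudioFocusChangeListener {
        private final MediaPlayer mPlayer = new MediaPlayer();   // illustrative player

        @Override public void onAudioFocusChange(int focusChange) {
            switch (focusChange) {
                case AudioManager.AUDIOFOCUS_LOSS:
                    mPlayer.stop();                      // focus is gone for good
                    break;
                case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
                    mPlayer.pause();                     // expect focus back shortly
                    break;
                case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
                    mPlayer.setVolume(0.2f, 0.2f);       // keep playing, quietly
                    break;
                case AudioManager.AUDIOFOCUS_GAIN:
                    mPlayer.setVolume(1.0f, 1.0f);       // restore after a transient loss
                    mPlayer.start();
                    break;
            }
        }
    }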
/frameworks/av/services/radio/
RadioService.h
76 bool audio);
98 bool mMute; // radio audio source state
99 // when unmuted, audio is routed to the
137 bool audio);
172 bool audio() const { return mAudio; } function in class:android::RadioService::ModuleClient
RadioService.cpp
25 #include <system/audio.h>
404 bool audio)
417 moduleClient = new ModuleClient(this, client, config, audio);
423 // There is a limited number of tuners and a limited number of radio audio sources per module.
424 // The minimum is one tuner and one audio source.
426 // NOTE: current framework implementation only supports one radio audio source.
428 // to the radio audio source (AUDIO_DEVICE_IN_FM_TUNER).
430 // and can use the audio source if requested.
433 // - If the newly connected client requests the audio source (audio
402 addClient(const sp<IRadioClient>& client, const struct radio_band_config *config, bool audio) argument
645 ModuleClient(const sp<Module>& module, const sp<IRadioClient>& client, const struct radio_band_config *config, bool audio) argument
[all...]
/frameworks/av/media/libmediaplayerservice/nuplayer/
HTTPLiveSource.cpp
104 sp<MetaData> NuPlayer::HTTPLiveSource::getFormatMeta(bool audio) { argument
108 audio ? LiveSession::STREAMTYPE_AUDIO
116 sp<AMessage> NuPlayer::HTTPLiveSource::getFormat(bool audio) { argument
121 audio ? LiveSession::STREAMTYPE_AUDIO
144 bool audio, sp<ABuffer> *accessUnit) {
146 audio ? LiveSession::STREAMTYPE_AUDIO
310 sp<AMessage> format = getFormat(false /* audio */);
352 bool audio = changedMask & LiveSession::STREAMTYPE_AUDIO; local
360 notify->setInt32("audio", audio);
143 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
[all...]
StreamingSource.cpp
179 sp<AnotherPacketSource> audioTrack = getSource(true /*audio*/);
180 sp<AnotherPacketSource> videoTrack = getSource(false /*audio*/);
188 ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
210 sp<AnotherPacketSource> NuPlayer::StreamingSource::getSource(bool audio) { argument
216 audio ? ATSParser::AUDIO : ATSParser::VIDEO);
221 sp<AMessage> NuPlayer::StreamingSource::getFormat(bool audio) { argument
222 sp<AnotherPacketSource> source = getSource(audio);
240 bool audio, sp<ABuffer> *accessUnit) {
241 sp<AnotherPacketSource> source = getSource(audio);
239 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
NuPlayer.h
216 inline const sp<DecoderBase> &getDecoder(bool audio) { argument
217 return audio ? mAudioDecoder : mVideoDecoder;
235 bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange = true);
245 void handleFlushComplete(bool audio, bool isDecoder);
254 void flushDecoder(bool audio, bool needShutdown);
267 void performDecoderFlush(FlushCommand audio, FlushCommand video);
277 bool audio, bool video, const sp<AMessage> &reply);
NuPlayerDecoder.cpp
53 return property_get_bool("media.stagefright.audio.deep", false /* default_value */);
131 mIsAudio ? "audio" : "video", cbID, mPaused);
179 mIsAudio ? "audio" : "video", err);
260 mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
654 ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video");
701 bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
777 mIsAudio ? "audio" : "video", formatChange, timeChange);
851 mIsAudio ? "audio" : "video",
1109 bool audio = !strncasecmp(oldMime.c_str(), "audio/", strle local
[all...]
RTSPSource.cpp
143 sp<MetaData> NuPlayer::RTSPSource::getFormatMeta(bool audio) { argument
144 sp<AnotherPacketSource> source = getSource(audio);
172 ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
190 bool audio, sp<ABuffer> *accessUnit) {
195 sp<AnotherPacketSource> source = getSource(audio);
206 if (sourceReachedEOS(!audio)) {
215 int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
217 setEOSTimeout(audio, ALooper::GetNowUs());
219 setEOSTimeout(audio, 0);
225 if (!sourceNearEOS(!audio)) {
189 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
241 getSource(bool audio) argument
252 setEOSTimeout(bool audio, int64_t timeout) argument
390 const bool audio = true; local
404 sourceReachedEOS(bool audio) argument
412 sourceNearEOS(bool audio) argument
434 const bool audio = true; local
[all...]
GenericSource.cpp
236 if (!strncasecmp(mime, "audio/", 6)) {
300 ALOGE("failed to start audio track!");
335 bool audio, Vector<MediaBuffer *> &buffers) {
336 if (mIsSecure && !audio && mVideoTrack.mSource != NULL) {
433 sp<MetaData> meta = doGetFormatMeta(false /* audio */);
642 if (!strncasecmp(mime, "audio/", 6)) {
839 sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) { argument
841 msg->setInt32("audio", audio);
855 int32_t audio; local
334 setBuffers( bool audio, Vector<MediaBuffer *> &buffers) argument
877 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
1174 bool audio = !strncasecmp(mime, "audio/", 6); local
1255 bool audio = trackType == MEDIA_TRACK_TYPE_AUDIO; local
[all...]
/frameworks/native/include/media/openmax/
OMX_Component.h
96 OMX_AUDIO_PORTDEFINITIONTYPE audio; member in union:OMX_PARAM_PORTDEFINITIONTYPE::__anon1401

Completed in 4630 milliseconds
