Searched defs:audio (Results 1 - 21 of 21) sorted by relevance

/frameworks/base/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/functional/audio/
SimTonesTest.java:17 package com.android.mediaframeworktest.functional.audio;
MediaBassBoostTest.java:17 package com.android.mediaframeworktest.functional.audio;
MediaPresetReverbTest.java:17 package com.android.mediaframeworktest.functional.audio;
210 // creating a volume controller on output mix ensures that ro.audio.silent mutes
211 // audio after the effects and not before
281 // creating a volume controller on output mix ensures that ro.audio.silent mutes
282 // audio after the effects and not before
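
The MediaPresetReverbTest comment above describes the pattern these effect tests exercise: an auxiliary reverb attached to the output mix, with playback routed through a MediaPlayer, plus a volume-controller effect created on session 0 so that ro.audio.silent mutes after the effects (that controller uses an AudioEffect constructor that is not in the public SDK and is omitted here). A minimal, hypothetical sketch of the public-API part; the data source path and preset are placeholders:

    import android.media.MediaPlayer;
    import android.media.audiofx.PresetReverb;
    import java.io.IOException;

    public class AuxReverbSketch {
        // Attach an auxiliary preset reverb to the output mix (audio session 0)
        // and send a MediaPlayer's output through it.
        // Creating an effect on session 0 may require MODIFY_AUDIO_SETTINGS.
        public static MediaPlayer playWithReverb(String path) throws IOException {
            MediaPlayer mp = new MediaPlayer();
            mp.setDataSource(path);          // placeholder path
            mp.prepare();

            PresetReverb reverb = new PresetReverb(0 /* priority */, 0 /* output mix */);
            reverb.setPreset(PresetReverb.PRESET_SMALLROOM);
            reverb.setEnabled(true);

            mp.attachAuxEffect(reverb.getId());   // route this player's signal to the effect
            mp.setAuxEffectSendLevel(1.0f);       // full send level
            mp.start();
            return mp;
        }
    }
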
MediaVirtualizerTest.java:17 package com.android.mediaframeworktest.functional.audio;
MediaAudioManagerTest.java:17 package com.android.mediaframeworktest.functional.audio;
155 // the audio focus request is async, so wait a bit to verify it had the expected effect
175 //Test case 1: test audio focus listener loses audio focus:
186 //Test case 2: test audio focus listener loses audio focus:
198 //Test case 3: test audio focus listener loses audio focus:
210 //Test case 4: test audio focus registering and use over 3000 iterations
220 assertTrue("audio focu
[all...]
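
The MediaAudioManagerTest matches above exercise the audio focus API; because requestAudioFocus() takes effect asynchronously, the test waits before asserting that the listener saw the change. A minimal sketch of the request/loss/abandon cycle using the classic (pre-API-26) AudioManager calls; the class and field names are placeholders:

    import android.content.Context;
    import android.media.AudioManager;

    public class FocusSketch {
        private final AudioManager mAudioManager;

        // Notified asynchronously when focus is lost or regained.
        private final AudioManager.OnAudioFocusChangeListener mListener =
                new AudioManager.OnAudioFocusChangeListener() {
                    @Override
                    public void onAudioFocusChange(int focusChange) {
                        if (focusChange == AudioManager.AUDIOFOCUS_LOSS
                                || focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) {
                            // pause or duck playback here
                        }
                    }
                };

        public FocusSketch(Context context) {
            mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        }

        public boolean acquireFocus() {
            int result = mAudioManager.requestAudioFocus(
                    mListener, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
            return result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
        }

        public void releaseFocus() {
            mAudioManager.abandonAudioFocus(mListener);
        }
    }
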
MediaEnvReverbTest.java:17 package com.android.mediaframeworktest.functional.audio;
365 // creating a volume controller on output mix ensures that ro.audio.silent mutes
366 // audio after the effects and not before
438 // creating a volume controller on output mix ensures that ro.audio.silent mutes
439 // audio after the effects and not before
MediaEqualizerTest.java:17 package com.android.mediaframeworktest.functional.audio;
MediaVisualizerTest.java:17 package com.android.mediaframeworktest.functional.audio;
242 // creating a volume controller on output mix ensures that ro.audio.silent mutes
243 // audio after the effects and not before
321 // creating a volume controller on output mix ensures that ro.audio.silent mutes
322 // audio after the effects and not before
454 // creating a volume controller on output mix ensures that ro.audio.silent mutes
455 // audio after the effects and not before
MediaAudioEffectTest.java:17 package com.android.mediaframeworktest.functional.audio;
276 //Test case 1.4: test constructor on mediaPlayer audio session
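
The "constructor on mediaPlayer audio session" case above refers to creating an effect bound to one player's session rather than to the output mix. A small hypothetical sketch with the public Equalizer API; the data source path is a placeholder:

    import android.media.MediaPlayer;
    import android.media.audiofx.Equalizer;
    import java.io.IOException;

    public class SessionEffectSketch {
        // Bind an insert effect to a single MediaPlayer's audio session,
        // so it only processes that player's output.
        public static Equalizer attachEqualizer(String path) throws IOException {
            MediaPlayer mp = new MediaPlayer();
            mp.setDataSource(path);          // placeholder path
            mp.prepare();

            Equalizer eq = new Equalizer(0 /* priority */, mp.getAudioSessionId());
            eq.setEnabled(true);

            mp.start();
            return eq;
        }
    }
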
MediaAudioTrackTest.java:17 package com.android.mediaframeworktest.functional.audio;
752 // count (given the audio track properties), and add 77.
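
The MediaAudioTrackTest match above derives an expected frame count from the track's properties (the "+ 77" offset is specific to that test's check). A hedged sketch of the underlying arithmetic with public AudioTrack APIs; the sample rate and format here are arbitrary choices, not taken from the test:

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    public class FrameCountSketch {
        // Frame count implied by the minimum buffer size for a given
        // sample rate / channel mask / encoding.
        public static int minFrameCount(int sampleRateHz) {
            int minBufBytes = AudioTrack.getMinBufferSize(
                    sampleRateHz,
                    AudioFormat.CHANNEL_OUT_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT);
            int bytesPerFrame = 2 /* channels */ * 2 /* bytes per 16-bit sample */;
            return minBufBytes / bytesPerFrame;
        }

        public static AudioTrack createTrack(int sampleRateHz) {
            int minBufBytes = AudioTrack.getMinBufferSize(
                    sampleRateHz,
                    AudioFormat.CHANNEL_OUT_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT);
            return new AudioTrack(
                    AudioManager.STREAM_MUSIC,
                    sampleRateHz,
                    AudioFormat.CHANNEL_OUT_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    minBufBytes,
                    AudioTrack.MODE_STREAM);
        }
    }
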
/frameworks/av/media/libmediaplayerservice/nuplayer/
StreamingSource.cpp:141 sp<MetaData> NuPlayer::StreamingSource::getFormatMeta(bool audio) { argument
143 audio ? ATSParser::AUDIO : ATSParser::VIDEO;
156 bool audio, sp<ABuffer> *accessUnit) {
158 audio ? ATSParser::AUDIO : ATSParser::VIDEO;
155 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
HTTPLiveSource.cpp:92 sp<MetaData> NuPlayer::HTTPLiveSource::getFormatMeta(bool audio) { argument
94 audio ? ATSParser::AUDIO : ATSParser::VIDEO;
168 bool audio, sp<ABuffer> *accessUnit) {
170 audio ? ATSParser::AUDIO : ATSParser::VIDEO;
167 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
NuPlayerSource.h:64 virtual sp<AMessage> getFormat(bool audio);
67 bool audio, sp<ABuffer> *accessUnit) = 0;
86 virtual sp<MetaData> getFormatMeta(bool audio) { return NULL; } argument
GenericSource.cpp:79 if (!strncasecmp(mime, "audio/", 6)) {
138 readBuffer(true /* audio */);
147 readBuffer(false /* audio */);
155 sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) { argument
156 sp<MediaSource> source = audio ? mAudioTrack.mSource : mVideoTrack.mSource;
166 bool audio, sp<ABuffer> *accessUnit) {
167 Track *track = audio ? &mAudioTrack : &mVideoTrack;
180 readBuffer(audio, -1ll);
193 readBuffer(false /* audio */, seekTimeUs, &actualTimeUs);
199 readBuffer(true /* audio */, seekTimeU
165 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
205 readBuffer( bool audio, int64_t seekTimeUs, int64_t *actualTimeUs) argument
[all...]
NuPlayerRenderer.cpp:61 bool audio,
65 msg->setInt32("audio", static_cast<int32_t>(audio));
71 void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { argument
75 msg->setInt32("audio", static_cast<int32_t>(audio));
80 void NuPlayer::Renderer::flush(bool audio) { argument
83 if (audio) {
93 msg->setInt32("audio", static_cast<int32_t>(audio));
60 queueBuffer( bool audio, const sp<ABuffer> &buffer, const sp<AMessage> &notifyConsumed) argument
417 notifyEOS(bool audio, status_t finalResult) argument
426 int32_t audio; local
512 int32_t audio; local
542 int32_t audio; local
589 notifyFlushComplete(bool audio) argument
596 dropBufferWhileFlushing( bool audio, const sp<AMessage> &msg) argument
[all...]
RTSPSource.cpp:147 sp<MetaData> NuPlayer::RTSPSource::getFormatMeta(bool audio) { argument
148 sp<AnotherPacketSource> source = getSource(audio);
176 ALOGV("audio track doesn't have enough data yet. (%.2f secs buffered)",
194 bool audio, sp<ABuffer> *accessUnit) {
207 sp<AnotherPacketSource> source = getSource(audio);
218 sp<AnotherPacketSource> otherSource = getSource(!audio);
232 int64_t eosTimeout = audio ? mEOSTimeoutAudio : mEOSTimeoutVideo;
234 setEOSTimeout(audio, ALooper::GetNowUs());
236 setEOSTimeout(audio, 0);
258 setEOSTimeout(audio,
193 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
263 getSource(bool audio) argument
274 setEOSTimeout(bool audio, int64_t timeout) argument
[all...]
NuPlayer.cpp:439 // We're not currently decoding anything (no audio or
462 bool audio = msg->what() == kWhatAudioNotify; local
472 audio, codecRequest);
484 ALOGV("got %s decoder EOS", audio ? "audio" : "video");
487 audio ? "audio" : "video",
491 mRenderer->queueEOS(audio, err);
495 if (audio) {
505 ALOGV("decoder %s flush completed", audio
653 int32_t audio; local
695 int32_t audio; local
810 instantiateDecoder(bool audio, sp<Decoder> *decoder) argument
840 feedDecoderInputData(bool audio, const sp<AMessage> &msg) argument
965 renderBuffer(bool audio, const sp<AMessage> &msg) argument
1025 flushDecoder(bool audio, bool needShutdown) argument
1066 getFormat(bool audio) argument
[all...]
/frameworks/av/media/libmediaplayerservice/nuplayer/mp4/
MP4Source.cpp:135 sp<AMessage> MP4Source::getFormat(bool audio) { argument
136 return mParser->getFormat(audio);
140 bool audio, sp<ABuffer> *accessUnit) {
141 return mParser->dequeueAccessUnit(audio, accessUnit);
139 dequeueAccessUnit( bool audio, sp<ABuffer> *accessUnit) argument
/frameworks/base/core/java/android/speech/srec/
Recognizer.java:55 * // create and start audio input
56 * InputStream audio = new MicrophoneInputStream(11025, 11025*5);
87 * // put more audio in the Recognizer
88 * recognizer.putAudio(audio);
100 * // stop the audio device
101 * audio.close();
159 * A separate config file is needed for each audio sample rate.
265 * Process some audio and return the current status.
288 * Put audio samples into the <code>Recognizer</code>.
289 * @param buf holds the audio sample
304 putAudio(InputStream audio) argument
[all...]
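
The Recognizer.java matches above come from the class javadoc's usage example: open a MicrophoneInputStream, feed it to the recognizer with putAudio(), and close it when done. A rough, hypothetical reconstruction of that flow; the config and grammar paths and the exact event handling are assumptions based on the excerpt, not a verbatim copy of the javadoc:

    import android.speech.srec.MicrophoneInputStream;
    import android.speech.srec.Recognizer;
    import java.io.IOException;
    import java.io.InputStream;

    public class SrecSketch {
        public static void recognizeOnce() throws IOException {
            // create and start audio input: 11025 Hz, roughly 5 seconds of FIFO
            InputStream audio = new MicrophoneInputStream(11025, 11025 * 5);

            // create a Recognizer and load a grammar (paths are placeholders)
            String cdir = Recognizer.getConfigDir(null);
            Recognizer recognizer = new Recognizer(cdir + "/baseline11k.par");
            Recognizer.Grammar grammar =
                    recognizer.new Grammar(cdir + "/grammars/VoiceDialer.g2g");
            grammar.setupRecognizer();

            recognizer.start();
            boolean done = false;
            while (!done) {
                switch (recognizer.advance()) {
                    case Recognizer.EVENT_NEED_MORE_AUDIO:
                        // put more audio in the Recognizer
                        recognizer.putAudio(audio);
                        break;
                    case Recognizer.EVENT_RECOGNITION_RESULT:
                        for (int i = 0; i < recognizer.getResultCount(); i++) {
                            String literal = recognizer.getResult(i, Recognizer.KEY_LITERAL);
                            // use the recognized phrase
                        }
                        done = true;
                        break;
                    case Recognizer.EVENT_INCOMPLETE:
                        break;               // let the Recognizer keep running
                    default:
                        done = true;         // no match, timeout, or error
                        break;
                }
            }
            recognizer.stop();
            recognizer.destroy();
            // stop the audio device
            audio.close();
        }
    }
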
/frameworks/native/include/media/openmax/
OMX_Component.h:96 OMX_AUDIO_PORTDEFINITIONTYPE audio; member in union:OMX_PARAM_PORTDEFINITIONTYPE::__anon1394
/frameworks/av/media/libstagefright/mp4/
FragmentedMP4Parser.cpp:258 sp<AMessage> FragmentedMP4Parser::getFormat(bool audio, bool synchronous) { argument
263 msg->setInt32("audio", audio);
294 msg->setInt32("audio", wantAudio);
347 status_t FragmentedMP4Parser::dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit, argument
352 msg->setInt32("audio", audio);
485 CHECK(msg->findInt32("audio", &wantAudio));
523 CHECK(msg->findInt32("audio", &wantAudio));
554 CHECK(msg->findInt32("audio",
[all...]

Completed in 299 milliseconds