Searched refs:audioFormat (Results 1 - 25 of 25) sorted by relevance

/frameworks/base/core/java/android/bluetooth/
BluetoothAudioConfig.java
35 public BluetoothAudioConfig(int sampleRate, int channelConfig, int audioFormat) {
38 mAudioFormat = audioFormat;
72 int audioFormat = in.readInt();
73 return new BluetoothAudioConfig(sampleRate, channelConfig, audioFormat);
/frameworks/base/core/java/android/speech/tts/
FileSynthesisCallback.java
102 public int start(int sampleRateInHz, int audioFormat, int channelCount) {
104 Log.d(TAG, "FileSynthesisRequest.start(" + sampleRateInHz + "," + audioFormat
107 if (audioFormat != AudioFormat.ENCODING_PCM_8BIT &&
108 audioFormat != AudioFormat.ENCODING_PCM_16BIT &&
109 audioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
110 Log.e(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one " +
114 mDispatcher.dispatchOnBeginSynthesis(sampleRateInHz, audioFormat, channelCount);
132 mAudioFormat = audioFormat;
203 int audioFormat = 0;
228 audioFormat
284 makeWavHeader(int sampleRateInHz, int audioFormat, int channelCount, int dataLength)
[all...]
ITextToSpeechCallback.aidl
65 * @param audioFormat The audio format of the generated audio in the {@link #onAudioAvailable}
71 void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat, int channelCount);
PlaybackSynthesisCallback.java
125 public int start(int sampleRateInHz, int audioFormat, int channelCount) {
126 if (DBG) Log.d(TAG, "start(" + sampleRateInHz + "," + audioFormat + "," + channelCount
128 if (audioFormat != AudioFormat.ENCODING_PCM_8BIT &&
129 audioFormat != AudioFormat.ENCODING_PCM_16BIT &&
130 audioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
131 Log.w(TAG, "Audio format encoding " + audioFormat + " not supported. Please use one " +
135 mDispatcher.dispatchOnBeginSynthesis(sampleRateInHz, audioFormat, channelCount);
158 mAudioParams, sampleRateInHz, audioFormat, channelCount,
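
Both FileSynthesisCallback.start() and PlaybackSynthesisCallback.start() above reject any encoding other than the three linear-PCM values. A minimal sketch of that check as an engine might apply it up front (class and method names are illustrative; only the AudioFormat constants come from the matches):

    import android.media.AudioFormat;

    // Illustrative helper mirroring the encoding check in the two start() methods above.
    final class TtsEncodingCheck {
        static boolean isSupportedTtsEncoding(int audioFormat) {
            return audioFormat == AudioFormat.ENCODING_PCM_8BIT
                    || audioFormat == AudioFormat.ENCODING_PCM_16BIT
                    || audioFormat == AudioFormat.ENCODING_PCM_FLOAT;
        }
    }
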
SynthesisCallback.java
65 * @param audioFormat Audio format of the generated audio. Must be one of
74 public int start(int sampleRateInHz, @SupportedAudioFormat int audioFormat,
UtteranceProgressListener.java
94 * @param audioFormat Audio format of the generated audio. Should be one of
99 public void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat, int channelCount) {
SynthesisPlaybackQueueItem.java
67 int audioFormat, int channelCount, UtteranceProgressDispatcher dispatcher,
77 mAudioTrack = new BlockingAudioTrack(audioParams, sampleRate, audioFormat, channelCount);
66 SynthesisPlaybackQueueItem(AudioOutputParams audioParams, int sampleRate, int audioFormat, int channelCount, UtteranceProgressDispatcher dispatcher, Object callerIdentity, AbstractEventLogger logger)
BlockingAudioTrack.java
79 int audioFormat, int channelCount) {
82 mAudioFormat = audioFormat;
217 AudioFormat audioFormat = (new AudioFormat.Builder())
222 audioFormat, bufferSizeInBytes, AudioTrack.MODE_STREAM,
78 BlockingAudioTrack(AudioOutputParams audioParams, int sampleRate, int audioFormat, int channelCount)
TextToSpeechService.java
662 public void dispatchOnBeginSynthesis(int sampleRateInHz, int audioFormat, int channelCount);
869 public void dispatchOnBeginSynthesis(int sampleRateInHz, int audioFormat, int channelCount) {
872 mCallbacks.dispatchOnBeginSynthesis(getCallerIdentity(), utteranceId, sampleRateInHz, audioFormat, channelCount);
1474 public void dispatchOnBeginSynthesis(Object callerIdentity, String utteranceId, int sampleRateInHz, int audioFormat, int channelCount) {
1478 cb.onBeginSynthesis(utteranceId, sampleRateInHz, audioFormat, channelCount);
TextToSpeech.java
2139 public void onBeginSynthesis(String utteranceId, int sampleRateInHz, int audioFormat,
2143 listener.onBeginSynthesis(utteranceId, sampleRateInHz, audioFormat, channelCount);
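
The TextToSpeechService/TextToSpeech matches above are the dispatch path that ends in UtteranceProgressListener.onBeginSynthesis(). A minimal app-side sketch of receiving those parameters, assuming an already-initialized TextToSpeech instance (API 24+; class name is illustrative):

    import android.speech.tts.TextToSpeech;
    import android.speech.tts.UtteranceProgressListener;
    import android.util.Log;

    final class SynthesisLogger {
        // Registers a listener whose onBeginSynthesis() receives the sampleRateInHz,
        // audioFormat and channelCount dispatched by TextToSpeechService above.
        static void attach(TextToSpeech tts) {
            tts.setOnUtteranceProgressListener(new UtteranceProgressListener() {
                @Override public void onStart(String utteranceId) {}
                @Override public void onDone(String utteranceId) {}
                @Override public void onError(String utteranceId) {}

                @Override
                public void onBeginSynthesis(String utteranceId, int sampleRateInHz,
                        int audioFormat, int channelCount) {
                    Log.d("SynthesisLogger", utteranceId + ": rate=" + sampleRateInHz
                            + " format=" + audioFormat + " channels=" + channelCount);
                }
            });
        }
    }
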
/frameworks/base/media/java/android/media/
AudioFormat.java
445 public static int getBytesPerSample(int audioFormat)
447 switch (audioFormat) {
458 throw new IllegalArgumentException("Bad audio format " + audioFormat);
463 public static boolean isValidEncoding(int audioFormat)
465 switch (audioFormat) {
485 public static boolean isPublicEncoding(int audioFormat)
487 switch (audioFormat) {
503 public static boolean isEncodingLinearPcm(int audioFormat)
505 switch (audioFormat) {
523 throw new IllegalArgumentException("Bad audio format " + audioFormat);
528 isEncodingLinearFrames(int audioFormat)
[all...]
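
getBytesPerSample(), isValidEncoding() and the other AudioFormat.java helpers matched here are framework-internal. An illustrative app-side equivalent of the getBytesPerSample() switch, restricted to the public linear-PCM encodings (an assumption; the framework switch covers additional encodings):

    import android.media.AudioFormat;

    final class PcmSampleSize {
        // Mirrors the per-sample size switch in AudioFormat.getBytesPerSample().
        static int bytesPerSample(int audioFormat) {
            switch (audioFormat) {
                case AudioFormat.ENCODING_PCM_8BIT:
                    return 1;
                case AudioFormat.ENCODING_PCM_16BIT:
                    return 2;
                case AudioFormat.ENCODING_PCM_FLOAT:
                    return 4;
                default:
                    throw new IllegalArgumentException("Bad audio format " + audioFormat);
            }
        }
    }
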
AudioTrack.java
339 * @param audioFormat the format in which the audio data is represented.
363 public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
366 this(streamType, sampleRateInHz, channelConfig, audioFormat,
392 * @param audioFormat the format in which the audio data is represented.
418 public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
427 .setEncoding(audioFormat)
807 int audioFormat, int mode) {
821 if (audioFormat == AudioFormat.ENCODING_IEC61937
874 if (audioFormat == AudioFormat.ENCODING_DEFAULT) {
875 audioFormat
806 audioParamCheck(int sampleRateInHz, int channelConfig, int channelIndexMask, int audioFormat, int mode)
1298 getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat)
2765 native_setup(Object audiotrack_this, Object attributes, int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat, int buffSizeInBytes, int mode, int[] sessionId, long nativeAudioTrack)
2835 native_get_min_buff_size( int sampleRateInHz, int channelConfig, int audioFormat)
[all...]
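
The AudioTrack matches trace audioFormat from the public constructors through audioParamCheck() down to native_get_min_buff_size(). A minimal sketch of the corresponding public API usage, with illustrative parameter choices (stereo 16-bit PCM, streaming mode):

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    final class TrackFactory {
        // getMinBufferSize() and the stream-type constructor both take the same
        // audioFormat encoding that audioParamCheck() validates above.
        static AudioTrack createPcm16Track(int sampleRateInHz) {
            int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
            int channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
            int minBytes = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
            return new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz,
                    channelConfig, audioFormat, minBytes, AudioTrack.MODE_STREAM);
        }
    }
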
AudioRecord.java
265 * @param audioFormat the format in which the audio data is to be returned.
275 public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat,
284 .setEncoding(audioFormat)
671 private void audioParamCheck(int audioSource, int sampleRateInHz, int audioFormat) argument
696 switch (audioFormat) {
703 mAudioFormat = audioFormat;
917 * @param audioFormat the format in which the audio data is represented.
926 static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
945 int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
1736 int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
1734 native_setup(Object audiorecord_this, Object attributes, int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat, int buffSizeInBytes, int[] sessionId, String opPackageName, long nativeRecordInJavaObj)
1772 native_get_min_buff_size( int sampleRateInHz, int channelCount, int audioFormat)
[all...]
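
The AudioRecord matches mirror the AudioTrack ones on the capture side. A minimal sketch of the public API the results point at, assuming mono 16-bit PCM from the microphone (the RECORD_AUDIO runtime permission is required but not shown):

    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;

    final class RecorderFactory {
        // The same audioFormat flows into getMinBufferSize() and the constructor,
        // where audioParamCheck() validates it as in the matches above.
        static AudioRecord createPcm16Recorder(int sampleRateInHz) {
            int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
            int channelConfig = AudioFormat.CHANNEL_IN_MONO;
            int minBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
            return new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRateInHz,
                    channelConfig, audioFormat, minBytes);
        }
    }
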
/frameworks/base/core/jni/
android_media_AudioFormat.h
43 static inline audio_format_t audioFormatToNative(int audioFormat)
45 switch (audioFormat) {
android_media_AudioTrack.cpp
218 jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession,
221 ALOGV("sampleRates=%p, channel mask=%x, index mask=%x, audioFormat(Java)=%d, buffSize=%d"
223 jSampleRate, channelPositionMask, channelIndexMask, audioFormat, buffSizeInBytes,
280 audio_format_t format = audioFormatToNative(audioFormat);
282 ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat);
623 static jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const T *data, argument
1059 jint sampleRateInHertz, jint channelCount, jint audioFormat) {
1069 const audio_format_t format = audioFormatToNative(audioFormat);
216 android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, jobject jaa, jintArray jSampleRate, jint channelPositionMask, jint channelIndexMask, jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession, jlong nativeAudioTrack)
1058 android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz, jint sampleRateInHertz, jint channelCount, jint audioFormat)
android_media_AudioRecord.cpp
184 jint audioFormat, jint buffSizeInBytes, jintArray jSession, jstring opPackageName,
188 //ALOGV("sampleRate=%d, audioFormat=%d, channel mask=%x, buffSizeInBytes=%d "
190 // sampleRateInHertz, audioFormat, channelMask, buffSizeInBytes, nativeRecordInJavaObj);
248 audio_format_t format = audioFormatToNative(audioFormat);
250 ALOGE("Error creating AudioRecord: unsupported audio format %d.", audioFormat);
644 jint sampleRateInHertz, jint channelCount, jint audioFormat) {
647 sampleRateInHertz, channelCount, audioFormat);
650 audio_format_t format = audioFormatToNative(audioFormat);
182 android_media_AudioRecord_setup(JNIEnv *env, jobject thiz, jobject weak_this, jobject jaa, jintArray jSampleRate, jint channelMask, jint channelIndexMask, jint audioFormat, jint buffSizeInBytes, jintArray jSession, jstring opPackageName, jlong nativeRecordInJavaObj)
643 android_media_AudioRecord_get_min_buff_size(JNIEnv *env, jobject thiz, jint sampleRateInHertz, jint channelCount, jint audioFormat)
/frameworks/av/media/libstagefright/
AudioPlayer.cpp
140 audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
143 sAudioFormatToPcmEncoding.map(pcmEncoding, &audioFormat);
147 if (mapMimeToAudioFormat(audioFormat, mime) != OK) {
149 audioFormat = AUDIO_FORMAT_INVALID;
151 ALOGV("Mime type \"%s\" mapped to audio_format 0x%x", mime, audioFormat);
155 if ((audioFormat == AUDIO_FORMAT_AAC) && format->findInt32(kKeyAACAOT, &aacaot)) {
157 mapAACProfileToAudioFormat(audioFormat,(OMX_AUDIO_AACPROFILETYPE) aacaot);
184 offloadInfo.format = audioFormat;
192 mSampleRate, numChannels, channelMask, audioFormat,
/frameworks/base/media/java/android/media/soundtrigger/
SoundTriggerDetector.java
121 AudioFormat audioFormat, int captureSession, byte[] data) {
125 mAudioFormat = audioFormat;
120 EventPayload(boolean triggerAvailable, boolean captureAvailable, AudioFormat audioFormat, int captureSession, byte[] data)
/frameworks/base/tests/Camera2Tests/SmartCamera/SimpleCamera/src/androidx/media/filterfw/decoder/
MediaDecoder.java
264 MediaFormat audioFormat = mMediaExtractor.getTrackFormat(mAudioTrackIndex);
265 mAudioTrackDecoder = new AudioTrackDecoder(mAudioTrackIndex, audioFormat, this);
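
Here audioFormat is a MediaFormat (track metadata), not an encoding constant: MediaDecoder pulls it from MediaExtractor.getTrackFormat(). A short sketch of the same lookup using only public MediaExtractor APIs (file-path handling and the class name are illustrative):

    import android.media.MediaExtractor;
    import android.media.MediaFormat;
    import java.io.IOException;

    final class AudioTrackFinder {
        // Returns the MediaFormat of the first audio track, or null if none exists.
        static MediaFormat findAudioFormat(String path) throws IOException {
            MediaExtractor extractor = new MediaExtractor();
            try {
                extractor.setDataSource(path);
                for (int i = 0; i < extractor.getTrackCount(); i++) {
                    MediaFormat format = extractor.getTrackFormat(i);
                    String mime = format.getString(MediaFormat.KEY_MIME);
                    if (mime != null && mime.startsWith("audio/")) {
                        return format;
                    }
                }
                return null;
            } finally {
                extractor.release();
            }
        }
    }
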
/frameworks/base/media/java/android/media/projection/
MediaProjection.java
147 int audioFormat, int bufferSizeInBytes) {
145 createAudioRecord( int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes)
/frameworks/base/core/java/android/service/voice/
AlwaysOnHotwordDetector.java
218 AudioFormat audioFormat, int captureSession, byte[] data) {
222 mAudioFormat = audioFormat;
217 EventPayload(boolean triggerAvailable, boolean captureAvailable, AudioFormat audioFormat, int captureSession, byte[] data)
/frameworks/av/media/libmediaplayerservice/nuplayer/
NuPlayer.h
232 void determineAudioModeChange(const sp<AMessage> &audioFormat);
NuPlayerRenderer.cpp
1768 audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
1771 status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
1779 mime.c_str(), audioFormat);
1785 if (audioFormat == AUDIO_FORMAT_AAC
1789 audioFormat,
1799 offloadInfo.format = audioFormat;
1823 audioFormat,
1870 AUDIO_FORMAT_PCM_16_BIT, // TODO: change to audioFormat
NuPlayer.cpp
1543 void NuPlayer::determineAudioModeChange(const sp<AMessage> &audioFormat) {
1566 tryOpenAudioSinkForOffload(audioFormat, audioMeta, hasVideo);
/frameworks/base/media/jni/soundpool/
SoundPool.cpp
504 uint32_t *rate, int *numChannels, audio_format_t *audioFormat,
516 *audioFormat = AUDIO_FORMAT_PCM_16_BIT;
503 decode(int fd, int64_t offset, int64_t length, uint32_t *rate, int *numChannels, audio_format_t *audioFormat, sp<MemoryHeapBase> heap, size_t *memsize)

Completed in 513 milliseconds