AudioTrack.java revision 9121a032bb11256deabca709a42f7212ea19feaa
1/* 2 * Copyright (C) 2008 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.media; 18 19import java.lang.ref.WeakReference; 20 21import android.os.Handler; 22import android.os.Looper; 23import android.os.Message; 24import android.util.Log; 25 26 27/** 28 * The AudioTrack class manages and plays a single audio resource for Java applications. 29 * It allows streaming PCM audio buffers to the audio hardware for playback. This is 30 * achieved by "pushing" the data to the AudioTrack object using one of the 31 * {@link #write(byte[], int, int)} and {@link #write(short[], int, int)} methods. 32 * 33 * <p>An AudioTrack instance can operate under two modes: static or streaming.<br> 34 * In Streaming mode, the application writes a continuous stream of data to the AudioTrack, using 35 * one of the {@code write()} methods. These are blocking and return when the data has been 36 * transferred from the Java layer to the native layer and queued for playback. 
The streaming 37 * mode is most useful when playing blocks of audio data that for instance are: 38 * 39 * <ul> 40 * <li>too big to fit in memory because of the duration of the sound to play,</li> 41 * <li>too big to fit in memory because of the characteristics of the audio data 42 * (high sampling rate, bits per sample ...)</li> 43 * <li>received or generated while previously queued audio is playing.</li> 44 * </ul> 45 * 46 * The static mode should be chosen when dealing with short sounds that fit in memory and 47 * that need to be played with the smallest latency possible. The static mode will 48 * therefore be preferred for UI and game sounds that are played often, and with the 49 * smallest overhead possible. 50 * 51 * <p>Upon creation, an AudioTrack object initializes its associated audio buffer. 52 * The size of this buffer, specified during the construction, determines how long an AudioTrack 53 * can play before running out of data.<br> 54 * For an AudioTrack using the static mode, this size is the maximum size of the sound that can 55 * be played from it.<br> 56 * For the streaming mode, data will be written to the hardware in chunks of 57 * sizes less than or equal to the total buffer size. 
 */
public class AudioTrack
{
    //---------------------------------------------------------
    // Constants
    //--------------------
    /** Minimum value for a channel volume */
    private static final float VOLUME_MIN = 0.0f;
    /** Maximum value for a channel volume */
    private static final float VOLUME_MAX = 1.0f;

    /** indicates AudioTrack state is stopped */
    public static final int PLAYSTATE_STOPPED = 1;  // matches SL_PLAYSTATE_STOPPED
    /** indicates AudioTrack state is paused */
    public static final int PLAYSTATE_PAUSED  = 2;  // matches SL_PLAYSTATE_PAUSED
    /** indicates AudioTrack state is playing */
    public static final int PLAYSTATE_PLAYING = 3;  // matches SL_PLAYSTATE_PLAYING

    // keep these values in sync with android_media_AudioTrack.cpp
    /**
     * Creation mode where audio data is transferred from Java to the native layer
     * only once before the audio starts playing.
     */
    public static final int MODE_STATIC = 0;
    /**
     * Creation mode where audio data is streamed from Java to the native layer
     * as the audio is playing.
     */
    public static final int MODE_STREAM = 1;

    /**
     * State of an AudioTrack that was not successfully initialized upon creation.
     */
    public static final int STATE_UNINITIALIZED = 0;
    /**
     * State of an AudioTrack that is ready to be used.
     */
    public static final int STATE_INITIALIZED   = 1;
    /**
     * State of a successfully initialized AudioTrack that uses static data,
     * but that hasn't received that data yet.
     */
    public static final int STATE_NO_STATIC_DATA = 2;

    // Error codes:
    // to keep in sync with frameworks/base/core/jni/android_media_AudioTrack.cpp
    /**
     * Denotes a successful operation.
     */
    public static final int SUCCESS = 0;
    /**
     * Denotes a generic operation failure.
     */
    public static final int ERROR = -1;
    /**
     * Denotes a failure due to the use of an invalid value.
     */
    public static final int ERROR_BAD_VALUE = -2;
    /**
     * Denotes a failure due to the improper use of a method.
     */
    public static final int ERROR_INVALID_OPERATION = -3;

    // Internal error codes returned by native_setup(); never surfaced through the public API.
    private static final int ERROR_NATIVESETUP_AUDIOSYSTEM        = -16;
    private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK = -17;
    private static final int ERROR_NATIVESETUP_INVALIDFORMAT      = -18;
    private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE  = -19;
    private static final int ERROR_NATIVESETUP_NATIVEINITFAILED   = -20;

    // Events:
    // to keep in sync with frameworks/base/include/media/AudioTrack.h
    /**
     * Event id denotes when playback head has reached a previously set marker.
     */
    private static final int NATIVE_EVENT_MARKER  = 3;
    /**
     * Event id denotes when previously set update period has elapsed during playback.
     */
    private static final int NATIVE_EVENT_NEW_POS = 4;

    private final static String TAG = "AudioTrack-Java";


    //--------------------------------------------------------------------------
    // Member variables
    //--------------------
    /**
     * Indicates the state of the AudioTrack instance.
     */
    private int mState = STATE_UNINITIALIZED;
    /**
     * Indicates the play state of the AudioTrack instance.
     */
    private int mPlayState = PLAYSTATE_STOPPED;
    /**
     * Lock to make sure mPlayState updates are reflecting the actual state of the object.
     */
    private final Object mPlayStateLock = new Object();
    /**
     * The listener the AudioTrack notifies when the playback position reaches a marker
     * or for periodic updates during the progression of the playback head.
     * @see #setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener)
     */
    private OnPlaybackPositionUpdateListener mPositionListener = null;
    /**
     * Lock to protect event listener updates against event notifications.
     */
    private final Object mPositionListenerLock = new Object();
    /**
     * Size of the native audio buffer.
     */
    private int mNativeBufferSizeInBytes = 0;
    /**
     * Handler for marker events coming from the native code.
     */
    private NativeEventHandlerDelegate mEventHandlerDelegate = null;
    /**
     * Looper associated with the thread that creates the AudioTrack instance.
     */
    private Looper mInitializationLooper = null;
    /**
     * The audio data sampling rate in Hz.
     */
    private int mSampleRate; // initialized by all constructors
    /**
     * The number of audio output channels (1 is mono, 2 is stereo).
     */
    private int mChannelCount = 1;
    /**
     * The audio channel mask.
     */
    private int mChannels = AudioFormat.CHANNEL_OUT_MONO;

    /**
     * The type of the audio stream to play. See
     * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     * {@link AudioManager#STREAM_ALARM}, {@link AudioManager#STREAM_NOTIFICATION}, and
     * {@link AudioManager#STREAM_DTMF}.
     */
    private int mStreamType = AudioManager.STREAM_MUSIC;
    /**
     * The way audio is consumed by the hardware, streaming or static.
     */
    private int mDataLoadMode = MODE_STREAM;
    /**
     * The current audio channel configuration.
     */
    private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
    /**
     * The encoding of the audio samples.
     * @see AudioFormat#ENCODING_PCM_8BIT
     * @see AudioFormat#ENCODING_PCM_16BIT
     */
    private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
    /**
     * Audio session ID
     */
    private int mSessionId = 0;


    //--------------------------------
    // Used exclusively by native code
    //--------------------
    /**
     * Accessed by native methods: provides access to C++ AudioTrack object.
     */
    @SuppressWarnings("unused")
    private int mNativeTrackInJavaObj;
    /**
     * Accessed by native methods: provides access to the JNI data (i.e. resources used by
     * the native AudioTrack object, but not stored in it).
     */
    @SuppressWarnings("unused")
    private int mJniData;


    //--------------------------------------------------------------------------
    // Constructor, Finalize
    //--------------------
    /**
     * Class constructor.
     * @param streamType the type of the audio stream. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
     * @param sampleRateInHz the sample rate expressed in Hertz.
     * @param channelConfig describes the configuration of the audio channels.
     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
     * @param audioFormat the format in which the audio data is represented.
     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
     *   {@link AudioFormat#ENCODING_PCM_8BIT}
     * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
     *   from for playback. If using the AudioTrack in streaming mode, you can write data into
     *   this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
     *   this is the maximum size of the sound that will be played for this instance.
     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
     *   smaller than getMinBufferSize() will result in an initialization failure.
     * @param mode streaming or static buffer.
See {@link #MODE_STATIC} and {@link #MODE_STREAM} 259 * @throws java.lang.IllegalArgumentException 260 */ 261 public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat, 262 int bufferSizeInBytes, int mode) 263 throws IllegalArgumentException { 264 this(streamType, sampleRateInHz, channelConfig, audioFormat, 265 bufferSizeInBytes, mode, 0); 266 } 267 268 /** 269 * Class constructor with audio session. Use this constructor when the AudioTrack must be 270 * attached to a particular audio session. The primary use of the audio session ID is to 271 * associate audio effects to a particular instance of AudioTrack: if an audio session ID 272 * is provided when creating an AudioEffect, this effect will be applied only to audio tracks 273 * and media players in the same session and not to the output mix. 274 * When an AudioTrack is created without specifying a session, it will create its own session 275 * which can be retreived by calling the {@link #getAudioSessionId()} method. 276 * If a non-zero session ID is provided, this AudioTrack will share effects attached to this 277 * session 278 * with all other media players or audio tracks in the same session, otherwise a new session 279 * will be created for this track if none is supplied. 280 * @param streamType the type of the audio stream. See 281 * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM}, 282 * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC}, 283 * {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}. 284 * @param sampleRateInHz the sample rate expressed in Hertz. 285 * @param channelConfig describes the configuration of the audio channels. 286 * See {@link AudioFormat#CHANNEL_OUT_MONO} and 287 * {@link AudioFormat#CHANNEL_OUT_STEREO} 288 * @param audioFormat the format in which the audio data is represented. 
289 * See {@link AudioFormat#ENCODING_PCM_16BIT} and 290 * {@link AudioFormat#ENCODING_PCM_8BIT} 291 * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read 292 * from for playback. If using the AudioTrack in streaming mode, you can write data into 293 * this buffer in smaller chunks than this size. If using the AudioTrack in static mode, 294 * this is the maximum size of the sound that will be played for this instance. 295 * See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size 296 * for the successful creation of an AudioTrack instance in streaming mode. Using values 297 * smaller than getMinBufferSize() will result in an initialization failure. 298 * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM} 299 * @param sessionId Id of audio session the AudioTrack must be attached to 300 * @throws java.lang.IllegalArgumentException 301 */ 302 public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat, 303 int bufferSizeInBytes, int mode, int sessionId) 304 throws IllegalArgumentException { 305 mState = STATE_UNINITIALIZED; 306 307 // remember which looper is associated with the AudioTrack instantiation 308 if ((mInitializationLooper = Looper.myLooper()) == null) { 309 mInitializationLooper = Looper.getMainLooper(); 310 } 311 312 audioParamCheck(streamType, sampleRateInHz, channelConfig, audioFormat, mode); 313 314 audioBuffSizeCheck(bufferSizeInBytes); 315 316 if (sessionId < 0) { 317 throw (new IllegalArgumentException("Invalid audio session ID: "+sessionId)); 318 } 319 320 int[] session = new int[1]; 321 session[0] = sessionId; 322 // native initialization 323 int initResult = native_setup(new WeakReference<AudioTrack>(this), 324 mStreamType, mSampleRate, mChannels, mAudioFormat, 325 mNativeBufferSizeInBytes, mDataLoadMode, session); 326 if (initResult != SUCCESS) { 327 loge("Error code "+initResult+" when initializing 
AudioTrack."); 328 return; // with mState == STATE_UNINITIALIZED 329 } 330 331 mSessionId = session[0]; 332 333 if (mDataLoadMode == MODE_STATIC) { 334 mState = STATE_NO_STATIC_DATA; 335 } else { 336 mState = STATE_INITIALIZED; 337 } 338 } 339 340 // mask of all the channels supported by this implementation 341 private static final int SUPPORTED_OUT_CHANNELS = 342 AudioFormat.CHANNEL_OUT_FRONT_LEFT | 343 AudioFormat.CHANNEL_OUT_FRONT_RIGHT | 344 AudioFormat.CHANNEL_OUT_FRONT_CENTER | 345 AudioFormat.CHANNEL_OUT_LOW_FREQUENCY | 346 AudioFormat.CHANNEL_OUT_BACK_LEFT | 347 AudioFormat.CHANNEL_OUT_BACK_RIGHT | 348 AudioFormat.CHANNEL_OUT_BACK_CENTER; 349 350 // Convenience method for the constructor's parameter checks. 351 // This is where constructor IllegalArgumentException-s are thrown 352 // postconditions: 353 // mStreamType is valid 354 // mChannelCount is valid 355 // mChannels is valid 356 // mAudioFormat is valid 357 // mSampleRate is valid 358 // mDataLoadMode is valid 359 private void audioParamCheck(int streamType, int sampleRateInHz, 360 int channelConfig, int audioFormat, int mode) { 361 362 //-------------- 363 // stream type 364 if( (streamType != AudioManager.STREAM_ALARM) && (streamType != AudioManager.STREAM_MUSIC) 365 && (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM) 366 && (streamType != AudioManager.STREAM_VOICE_CALL) 367 && (streamType != AudioManager.STREAM_NOTIFICATION) 368 && (streamType != AudioManager.STREAM_BLUETOOTH_SCO) 369 && (streamType != AudioManager.STREAM_DTMF)) { 370 throw (new IllegalArgumentException("Invalid stream type.")); 371 } else { 372 mStreamType = streamType; 373 } 374 375 //-------------- 376 // sample rate, note these values are subject to change 377 if ( (sampleRateInHz < 4000) || (sampleRateInHz > 48000) ) { 378 throw (new IllegalArgumentException(sampleRateInHz 379 + "Hz is not a supported sample rate.")); 380 } else { 381 mSampleRate = sampleRateInHz; 382 } 383 384 
//-------------- 385 // channel config 386 mChannelConfiguration = channelConfig; 387 388 switch (channelConfig) { 389 case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT 390 case AudioFormat.CHANNEL_OUT_MONO: 391 case AudioFormat.CHANNEL_CONFIGURATION_MONO: 392 mChannelCount = 1; 393 mChannels = AudioFormat.CHANNEL_OUT_MONO; 394 break; 395 case AudioFormat.CHANNEL_OUT_STEREO: 396 case AudioFormat.CHANNEL_CONFIGURATION_STEREO: 397 mChannelCount = 2; 398 mChannels = AudioFormat.CHANNEL_OUT_STEREO; 399 break; 400 default: 401 if (!isMultichannelConfigSupported(channelConfig)) { 402 // input channel configuration features unsupported channels 403 mChannelCount = 0; 404 mChannels = AudioFormat.CHANNEL_INVALID; 405 mChannelConfiguration = AudioFormat.CHANNEL_INVALID; 406 throw(new IllegalArgumentException("Unsupported channel configuration.")); 407 } else { 408 mChannels = channelConfig; 409 mChannelCount = Integer.bitCount(channelConfig); 410 } 411 } 412 413 //-------------- 414 // audio format 415 switch (audioFormat) { 416 case AudioFormat.ENCODING_DEFAULT: 417 mAudioFormat = AudioFormat.ENCODING_PCM_16BIT; 418 break; 419 case AudioFormat.ENCODING_PCM_16BIT: 420 case AudioFormat.ENCODING_PCM_8BIT: 421 mAudioFormat = audioFormat; 422 break; 423 default: 424 mAudioFormat = AudioFormat.ENCODING_INVALID; 425 throw(new IllegalArgumentException("Unsupported sample encoding." 
426 + " Should be ENCODING_PCM_8BIT or ENCODING_PCM_16BIT.")); 427 } 428 429 //-------------- 430 // audio load mode 431 if ( (mode != MODE_STREAM) && (mode != MODE_STATIC) ) { 432 throw(new IllegalArgumentException("Invalid mode.")); 433 } else { 434 mDataLoadMode = mode; 435 } 436 } 437 438 /** 439 * Convenience method to check that the channel configuration (a.k.a channel mask) is supported 440 * @param channelConfig the mask to validate 441 * @return false if the AudioTrack can't be used with such a mask 442 */ 443 private static boolean isMultichannelConfigSupported(int channelConfig) { 444 // check for unsupported channels 445 if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) { 446 Log.e(TAG, "Channel configuration features unsupported channels"); 447 return false; 448 } 449 // check for unsupported multichannel combinations: 450 // - FL/FR must be present 451 // - L/R channels must be paired (e.g. no single L channel) 452 final int frontPair = 453 AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT; 454 if ((channelConfig & frontPair) != frontPair) { 455 Log.e(TAG, "Front channels must be present in multichannel configurations"); 456 return false; 457 } 458 final int backPair = 459 AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT; 460 if ((channelConfig & backPair) != 0) { 461 if ((channelConfig & backPair) != backPair) { 462 Log.e(TAG, "Rear channels can't be used independently"); 463 return false; 464 } 465 } 466 return true; 467 } 468 469 470 // Convenience method for the contructor's audio buffer size check. 471 // preconditions: 472 // mChannelCount is valid 473 // mAudioFormat is valid 474 // postcondition: 475 // mNativeBufferSizeInBytes is valid (multiple of frame size, positive) 476 private void audioBuffSizeCheck(int audioBufferSize) { 477 // NB: this section is only valid with PCM data. 
478 // To update when supporting compressed formats 479 int frameSizeInBytes = mChannelCount 480 * (mAudioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2); 481 if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) { 482 throw (new IllegalArgumentException("Invalid audio buffer size.")); 483 } 484 485 mNativeBufferSizeInBytes = audioBufferSize; 486 } 487 488 489 /** 490 * Releases the native AudioTrack resources. 491 */ 492 public void release() { 493 // even though native_release() stops the native AudioTrack, we need to stop 494 // AudioTrack subclasses too. 495 try { 496 stop(); 497 } catch(IllegalStateException ise) { 498 // don't raise an exception, we're releasing the resources. 499 } 500 native_release(); 501 mState = STATE_UNINITIALIZED; 502 } 503 504 @Override 505 protected void finalize() { 506 native_finalize(); 507 } 508 509 //-------------------------------------------------------------------------- 510 // Getters 511 //-------------------- 512 /** 513 * Returns the minimum valid volume value. Volume values set under this one will 514 * be clamped at this value. 515 * @return the minimum volume expressed as a linear attenuation. 516 */ 517 static public float getMinVolume() { 518 return AudioTrack.VOLUME_MIN; 519 } 520 521 /** 522 * Returns the maximum valid volume value. Volume values set above this one will 523 * be clamped at this value. 524 * @return the maximum volume expressed as a linear attenuation. 525 */ 526 static public float getMaxVolume() { 527 return AudioTrack.VOLUME_MAX; 528 } 529 530 /** 531 * Returns the configured audio data sample rate in Hz 532 */ 533 public int getSampleRate() { 534 return mSampleRate; 535 } 536 537 /** 538 * Returns the current playback rate in Hz. 539 */ 540 public int getPlaybackRate() { 541 return native_get_playback_rate(); 542 } 543 544 /** 545 * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT} 546 * and {@link AudioFormat#ENCODING_PCM_8BIT}. 
547 */ 548 public int getAudioFormat() { 549 return mAudioFormat; 550 } 551 552 /** 553 * Returns the type of audio stream this AudioTrack is configured for. 554 * Compare the result against {@link AudioManager#STREAM_VOICE_CALL}, 555 * {@link AudioManager#STREAM_SYSTEM}, {@link AudioManager#STREAM_RING}, 556 * {@link AudioManager#STREAM_MUSIC}, {@link AudioManager#STREAM_ALARM}, 557 * {@link AudioManager#STREAM_NOTIFICATION}, or {@link AudioManager#STREAM_DTMF}. 558 */ 559 public int getStreamType() { 560 return mStreamType; 561 } 562 563 /** 564 * Returns the configured channel configuration. 565 566 * See {@link AudioFormat#CHANNEL_OUT_MONO} 567 * and {@link AudioFormat#CHANNEL_OUT_STEREO}. 568 */ 569 public int getChannelConfiguration() { 570 return mChannelConfiguration; 571 } 572 573 /** 574 * Returns the configured number of channels. 575 */ 576 public int getChannelCount() { 577 return mChannelCount; 578 } 579 580 /** 581 * Returns the state of the AudioTrack instance. This is useful after the 582 * AudioTrack instance has been created to check if it was initialized 583 * properly. This ensures that the appropriate hardware resources have been 584 * acquired. 585 * @see #STATE_INITIALIZED 586 * @see #STATE_NO_STATIC_DATA 587 * @see #STATE_UNINITIALIZED 588 */ 589 public int getState() { 590 return mState; 591 } 592 593 /** 594 * Returns the playback state of the AudioTrack instance. 595 * @see #PLAYSTATE_STOPPED 596 * @see #PLAYSTATE_PAUSED 597 * @see #PLAYSTATE_PLAYING 598 */ 599 public int getPlayState() { 600 synchronized (mPlayStateLock) { 601 return mPlayState; 602 } 603 } 604 605 /** 606 * Returns the native frame count used by the hardware. 607 */ 608 protected int getNativeFrameCount() { 609 return native_get_native_frame_count(); 610 } 611 612 /** 613 * Returns marker position expressed in frames. 
614 */ 615 public int getNotificationMarkerPosition() { 616 return native_get_marker_pos(); 617 } 618 619 /** 620 * Returns the notification update period expressed in frames. 621 */ 622 public int getPositionNotificationPeriod() { 623 return native_get_pos_update_period(); 624 } 625 626 /** 627 * Returns the playback head position expressed in frames 628 */ 629 public int getPlaybackHeadPosition() { 630 return native_get_position(); 631 } 632 633 /** 634 * Returns the hardware output sample rate 635 */ 636 static public int getNativeOutputSampleRate(int streamType) { 637 return native_get_output_sample_rate(streamType); 638 } 639 640 /** 641 * Returns the minimum buffer size required for the successful creation of an AudioTrack 642 * object to be created in the {@link #MODE_STREAM} mode. Note that this size doesn't 643 * guarantee a smooth playback under load, and higher values should be chosen according to 644 * the expected frequency at which the buffer will be refilled with additional data to play. 645 * @param sampleRateInHz the sample rate expressed in Hertz. 646 * @param channelConfig describes the configuration of the audio channels. 647 * See {@link AudioFormat#CHANNEL_OUT_MONO} and 648 * {@link AudioFormat#CHANNEL_OUT_STEREO} 649 * @param audioFormat the format in which the audio data is represented. 650 * See {@link AudioFormat#ENCODING_PCM_16BIT} and 651 * {@link AudioFormat#ENCODING_PCM_8BIT} 652 * @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed, 653 * or {@link #ERROR} if the implementation was unable to query the hardware for its output 654 * properties, 655 * or the minimum buffer size expressed in bytes. 
656 */ 657 static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) { 658 int channelCount = 0; 659 switch(channelConfig) { 660 case AudioFormat.CHANNEL_OUT_MONO: 661 case AudioFormat.CHANNEL_CONFIGURATION_MONO: 662 channelCount = 1; 663 break; 664 case AudioFormat.CHANNEL_OUT_STEREO: 665 case AudioFormat.CHANNEL_CONFIGURATION_STEREO: 666 channelCount = 2; 667 break; 668 default: 669 if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) { 670 // input channel configuration features unsupported channels 671 loge("getMinBufferSize(): Invalid channel configuration."); 672 return AudioTrack.ERROR_BAD_VALUE; 673 } else { 674 channelCount = Integer.bitCount(channelConfig); 675 } 676 } 677 678 if ((audioFormat != AudioFormat.ENCODING_PCM_16BIT) 679 && (audioFormat != AudioFormat.ENCODING_PCM_8BIT)) { 680 loge("getMinBufferSize(): Invalid audio format."); 681 return AudioTrack.ERROR_BAD_VALUE; 682 } 683 684 // sample rate, note these values are subject to change 685 if ( (sampleRateInHz < 4000) || (sampleRateInHz > 48000) ) { 686 loge("getMinBufferSize(): " + sampleRateInHz +"Hz is not a supported sample rate."); 687 return AudioTrack.ERROR_BAD_VALUE; 688 } 689 690 int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat); 691 if ((size == -1) || (size == 0)) { 692 loge("getMinBufferSize(): error querying hardware"); 693 return AudioTrack.ERROR; 694 } 695 else { 696 return size; 697 } 698 } 699 700 /** 701 * Returns the audio session ID. 702 * 703 * @return the ID of the audio session this AudioTrack belongs to. 704 */ 705 public int getAudioSessionId() { 706 return mSessionId; 707 } 708 709 //-------------------------------------------------------------------------- 710 // Initialization / configuration 711 //-------------------- 712 /** 713 * Sets the listener the AudioTrack notifies when a previously set marker is reached or 714 * for each periodic playback head position update. 
715 * Notifications will be received in the same thread as the one in which the AudioTrack 716 * instance was created. 717 * @param listener 718 */ 719 public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) { 720 setPlaybackPositionUpdateListener(listener, null); 721 } 722 723 /** 724 * Sets the listener the AudioTrack notifies when a previously set marker is reached or 725 * for each periodic playback head position update. 726 * Use this method to receive AudioTrack events in the Handler associated with another 727 * thread than the one in which you created the AudioTrack instance. 728 * @param listener 729 * @param handler the Handler that will receive the event notification messages. 730 */ 731 public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener, 732 Handler handler) { 733 synchronized (mPositionListenerLock) { 734 mPositionListener = listener; 735 } 736 if (listener != null) { 737 mEventHandlerDelegate = new NativeEventHandlerDelegate(this, handler); 738 } 739 740 } 741 742 743 744 /** 745 * Sets the specified left/right output volume values on the AudioTrack. Values are clamped 746 * to the ({@link #getMinVolume()}, {@link #getMaxVolume()}) interval if outside this range. 747 * @param leftVolume output attenuation for the left channel. A value of 0.0f is silence, 748 * a value of 1.0f is no attenuation. 
749 * @param rightVolume output attenuation for the right channel 750 * @return error code or success, see {@link #SUCCESS}, 751 * {@link #ERROR_INVALID_OPERATION} 752 */ 753 public int setStereoVolume(float leftVolume, float rightVolume) { 754 if (mState != STATE_INITIALIZED) { 755 return ERROR_INVALID_OPERATION; 756 } 757 758 // clamp the volumes 759 if (leftVolume < getMinVolume()) { 760 leftVolume = getMinVolume(); 761 } 762 if (leftVolume > getMaxVolume()) { 763 leftVolume = getMaxVolume(); 764 } 765 if (rightVolume < getMinVolume()) { 766 rightVolume = getMinVolume(); 767 } 768 if (rightVolume > getMaxVolume()) { 769 rightVolume = getMaxVolume(); 770 } 771 772 native_setVolume(leftVolume, rightVolume); 773 774 return SUCCESS; 775 } 776 777 778 /** 779 * Similar, except set volume of all channels to same value. 780 * @hide 781 */ 782 public int setVolume(float volume) { 783 return setStereoVolume(volume, volume); 784 } 785 786 787 /** 788 * Sets the playback sample rate for this track. This sets the sampling rate at which 789 * the audio data will be consumed and played back, not the original sampling rate of the 790 * content. Setting it to half the sample rate of the content will cause the playback to 791 * last twice as long, but will also result in a negative pitch shift. 792 * The valid sample rate range is from 1Hz to twice the value returned by 793 * {@link #getNativeOutputSampleRate(int)}. 794 * @param sampleRateInHz the sample rate expressed in Hz 795 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE}, 796 * {@link #ERROR_INVALID_OPERATION} 797 */ 798 public int setPlaybackRate(int sampleRateInHz) { 799 if (mState != STATE_INITIALIZED) { 800 return ERROR_INVALID_OPERATION; 801 } 802 if (sampleRateInHz <= 0) { 803 return ERROR_BAD_VALUE; 804 } 805 return native_set_playback_rate(sampleRateInHz); 806 } 807 808 809 /** 810 * Sets the position of the notification marker. 
811 * @param markerInFrames marker in frames 812 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE}, 813 * {@link #ERROR_INVALID_OPERATION} 814 */ 815 public int setNotificationMarkerPosition(int markerInFrames) { 816 if (mState != STATE_INITIALIZED) { 817 return ERROR_INVALID_OPERATION; 818 } 819 return native_set_marker_pos(markerInFrames); 820 } 821 822 823 /** 824 * Sets the period for the periodic notification event. 825 * @param periodInFrames update period expressed in frames 826 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION} 827 */ 828 public int setPositionNotificationPeriod(int periodInFrames) { 829 if (mState != STATE_INITIALIZED) { 830 return ERROR_INVALID_OPERATION; 831 } 832 return native_set_pos_update_period(periodInFrames); 833 } 834 835 836 /** 837 * Sets the playback head position. The track must be stopped for the position to be changed. 838 * @param positionInFrames playback head position expressed in frames 839 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE}, 840 * {@link #ERROR_INVALID_OPERATION} 841 */ 842 public int setPlaybackHeadPosition(int positionInFrames) { 843 synchronized(mPlayStateLock) { 844 if ((mPlayState == PLAYSTATE_STOPPED) || (mPlayState == PLAYSTATE_PAUSED)) { 845 return native_set_position(positionInFrames); 846 } else { 847 return ERROR_INVALID_OPERATION; 848 } 849 } 850 } 851 852 /** 853 * Sets the loop points and the loop count. The loop can be infinite. 854 * @param startInFrames loop start marker expressed in frames 855 * @param endInFrames loop end marker expressed in frames 856 * @param loopCount the number of times the loop is looped. 857 * A value of -1 means infinite looping. 
858 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE}, 859 * {@link #ERROR_INVALID_OPERATION} 860 */ 861 public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) { 862 if (mDataLoadMode == MODE_STREAM) { 863 return ERROR_INVALID_OPERATION; 864 } 865 return native_set_loop(startInFrames, endInFrames, loopCount); 866 } 867 868 /** 869 * Sets the initialization state of the instance. To be used in an AudioTrack subclass 870 * constructor to set a subclass-specific post-initialization state. 871 * @param state the state of the AudioTrack instance 872 */ 873 protected void setState(int state) { 874 mState = state; 875 } 876 877 878 //--------------------------------------------------------- 879 // Transport control methods 880 //-------------------- 881 /** 882 * Starts playing an AudioTrack. 883 * 884 * @throws IllegalStateException 885 */ 886 public void play() 887 throws IllegalStateException { 888 if (mState != STATE_INITIALIZED) { 889 throw(new IllegalStateException("play() called on uninitialized AudioTrack.")); 890 } 891 892 synchronized(mPlayStateLock) { 893 native_start(); 894 mPlayState = PLAYSTATE_PLAYING; 895 } 896 } 897 898 /** 899 * Stops playing the audio data. 900 * When used on an instance created in {@link #MODE_STREAM} mode, audio will stop playing 901 * after the last buffer that was written has been played. For an immediate stop, use 902 * {@link #pause()}, followed by {@link #flush()} to discard audio data that hasn't been played 903 * back yet. 904 * @throws IllegalStateException 905 */ 906 public void stop() 907 throws IllegalStateException { 908 if (mState != STATE_INITIALIZED) { 909 throw(new IllegalStateException("stop() called on uninitialized AudioTrack.")); 910 } 911 912 // stop playing 913 synchronized(mPlayStateLock) { 914 native_stop(); 915 mPlayState = PLAYSTATE_STOPPED; 916 } 917 } 918 919 /** 920 * Pauses the playback of the audio data. 
Data that has not been played 921 * back will not be discarded. Subsequent calls to {@link #play} will play 922 * this data back. See {@link #flush()} to discard this data. 923 * 924 * @throws IllegalStateException 925 */ 926 public void pause() 927 throws IllegalStateException { 928 if (mState != STATE_INITIALIZED) { 929 throw(new IllegalStateException("pause() called on uninitialized AudioTrack.")); 930 } 931 //logd("pause()"); 932 933 // pause playback 934 synchronized(mPlayStateLock) { 935 native_pause(); 936 mPlayState = PLAYSTATE_PAUSED; 937 } 938 } 939 940 941 //--------------------------------------------------------- 942 // Audio data supply 943 //-------------------- 944 945 /** 946 * Flushes the audio data currently queued for playback. Any data that has 947 * not been played back will be discarded. 948 */ 949 public void flush() { 950 if (mState == STATE_INITIALIZED) { 951 // flush the data in native layer 952 native_flush(); 953 } 954 955 } 956 957 /** 958 * Writes the audio data to the audio hardware for playback. Will block until 959 * all data has been written to the audio mixer. 960 * Note that the actual playback of this data might occur after this function 961 * returns. This function is thread safe with respect to {@link #stop} calls, 962 * in which case all of the specified data might not be written to the mixer. 963 * 964 * @param audioData the array that holds the data to play. 965 * @param offsetInBytes the offset expressed in bytes in audioData where the data to play 966 * starts. 967 * @param sizeInBytes the number of bytes to read in audioData after the offset. 968 * @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION} 969 * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if 970 * the parameters don't resolve to valid data and indexes. 
971 */ 972 973 public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) { 974 if ((mDataLoadMode == MODE_STATIC) 975 && (mState == STATE_NO_STATIC_DATA) 976 && (sizeInBytes > 0)) { 977 mState = STATE_INITIALIZED; 978 } 979 980 if (mState != STATE_INITIALIZED) { 981 return ERROR_INVALID_OPERATION; 982 } 983 984 if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0) 985 || (offsetInBytes + sizeInBytes > audioData.length)) { 986 return ERROR_BAD_VALUE; 987 } 988 989 return native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat); 990 } 991 992 993 /** 994 * Writes the audio data to the audio hardware for playback. Will block until 995 * all data has been written to the audio mixer. 996 * Note that the actual playback of this data might occur after this function 997 * returns. This function is thread safe with respect to {@link #stop} calls, 998 * in which case all of the specified data might not be written to the mixer. 999 * 1000 * @param audioData the array that holds the data to play. 1001 * @param offsetInShorts the offset expressed in shorts in audioData where the data to play 1002 * starts. 1003 * @param sizeInShorts the number of bytes to read in audioData after the offset. 1004 * @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION} 1005 * if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if 1006 * the parameters don't resolve to valid data and indexes. 
1007 */ 1008 1009 public int write(short[] audioData, int offsetInShorts, int sizeInShorts) { 1010 if ((mDataLoadMode == MODE_STATIC) 1011 && (mState == STATE_NO_STATIC_DATA) 1012 && (sizeInShorts > 0)) { 1013 mState = STATE_INITIALIZED; 1014 } 1015 1016 if (mState != STATE_INITIALIZED) { 1017 return ERROR_INVALID_OPERATION; 1018 } 1019 1020 if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0) 1021 || (offsetInShorts + sizeInShorts > audioData.length)) { 1022 return ERROR_BAD_VALUE; 1023 } 1024 1025 return native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat); 1026 } 1027 1028 1029 /** 1030 * Notifies the native resource to reuse the audio data already loaded in the native 1031 * layer. This call is only valid with AudioTrack instances that don't use the streaming 1032 * model. 1033 * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE}, 1034 * {@link #ERROR_INVALID_OPERATION} 1035 */ 1036 public int reloadStaticData() { 1037 if (mDataLoadMode == MODE_STREAM) { 1038 return ERROR_INVALID_OPERATION; 1039 } 1040 return native_reload_static(); 1041 } 1042 1043 //-------------------------------------------------------------------------- 1044 // Audio effects management 1045 //-------------------- 1046 1047 /** 1048 * Attaches an auxiliary effect to the audio track. A typical auxiliary 1049 * effect is a reverberation effect which can be applied on any sound source 1050 * that directs a certain amount of its energy to this effect. This amount 1051 * is defined by setAuxEffectSendLevel(). 1052 * {@see #setAuxEffectSendLevel(float)}. 1053 * <p>After creating an auxiliary effect (e.g. 1054 * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with 1055 * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling 1056 * this method to attach the audio track to the effect. 1057 * <p>To detach the effect from the audio track, call this method with a 1058 * null effect id. 
1059 * 1060 * @param effectId system wide unique id of the effect to attach 1061 * @return error code or success, see {@link #SUCCESS}, 1062 * {@link #ERROR_INVALID_OPERATION}, {@link #ERROR_BAD_VALUE} 1063 */ 1064 public int attachAuxEffect(int effectId) { 1065 if (mState != STATE_INITIALIZED) { 1066 return ERROR_INVALID_OPERATION; 1067 } 1068 return native_attachAuxEffect(effectId); 1069 } 1070 1071 /** 1072 * Sets the send level of the audio track to the attached auxiliary effect 1073 * {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0. 1074 * <p>By default the send level is 0, so even if an effect is attached to the player 1075 * this method must be called for the effect to be applied. 1076 * <p>Note that the passed level value is a raw scalar. UI controls should be scaled 1077 * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB, 1078 * so an appropriate conversion from linear UI input x to level is: 1079 * x == 0 -> level = 0 1080 * 0 < x <= R -> level = 10^(72*(x-R)/20/R) 1081 * 1082 * @param level send level scalar 1083 * @return error code or success, see {@link #SUCCESS}, 1084 * {@link #ERROR_INVALID_OPERATION} 1085 */ 1086 public int setAuxEffectSendLevel(float level) { 1087 if (mState != STATE_INITIALIZED) { 1088 return ERROR_INVALID_OPERATION; 1089 } 1090 // clamp the level 1091 if (level < getMinVolume()) { 1092 level = getMinVolume(); 1093 } 1094 if (level > getMaxVolume()) { 1095 level = getMaxVolume(); 1096 } 1097 native_setAuxEffectSendLevel(level); 1098 return SUCCESS; 1099 } 1100 1101 //--------------------------------------------------------- 1102 // Interface definitions 1103 //-------------------- 1104 /** 1105 * Interface definition for a callback to be invoked when the playback head position of 1106 * an AudioTrack has reached a notification marker or has increased by a certain period. 
1107 */ 1108 public interface OnPlaybackPositionUpdateListener { 1109 /** 1110 * Called on the listener to notify it that the previously set marker has been reached 1111 * by the playback head. 1112 */ 1113 void onMarkerReached(AudioTrack track); 1114 1115 /** 1116 * Called on the listener to periodically notify it that the playback head has reached 1117 * a multiple of the notification period. 1118 */ 1119 void onPeriodicNotification(AudioTrack track); 1120 } 1121 1122 1123 //--------------------------------------------------------- 1124 // Inner classes 1125 //-------------------- 1126 /** 1127 * Helper class to handle the forwarding of native events to the appropriate listener 1128 * (potentially) handled in a different thread 1129 */ 1130 private class NativeEventHandlerDelegate { 1131 private final AudioTrack mAudioTrack; 1132 private final Handler mHandler; 1133 1134 NativeEventHandlerDelegate(AudioTrack track, Handler handler) { 1135 mAudioTrack = track; 1136 // find the looper for our new event handler 1137 Looper looper; 1138 if (handler != null) { 1139 looper = handler.getLooper(); 1140 } else { 1141 // no given handler, use the looper the AudioTrack was created in 1142 looper = mInitializationLooper; 1143 } 1144 1145 // construct the event handler with this looper 1146 if (looper != null) { 1147 // implement the event handler delegate 1148 mHandler = new Handler(looper) { 1149 @Override 1150 public void handleMessage(Message msg) { 1151 if (mAudioTrack == null) { 1152 return; 1153 } 1154 OnPlaybackPositionUpdateListener listener = null; 1155 synchronized (mPositionListenerLock) { 1156 listener = mAudioTrack.mPositionListener; 1157 } 1158 switch(msg.what) { 1159 case NATIVE_EVENT_MARKER: 1160 if (listener != null) { 1161 listener.onMarkerReached(mAudioTrack); 1162 } 1163 break; 1164 case NATIVE_EVENT_NEW_POS: 1165 if (listener != null) { 1166 listener.onPeriodicNotification(mAudioTrack); 1167 } 1168 break; 1169 default: 1170 Log.e(TAG, "[ 
android.media.AudioTrack.NativeEventHandler ] " + 1171 "Unknown event type: " + msg.what); 1172 break; 1173 } 1174 } 1175 }; 1176 } else { 1177 mHandler = null; 1178 } 1179 } 1180 1181 Handler getHandler() { 1182 return mHandler; 1183 } 1184 } 1185 1186 1187 //--------------------------------------------------------- 1188 // Java methods called from the native side 1189 //-------------------- 1190 @SuppressWarnings("unused") 1191 private static void postEventFromNative(Object audiotrack_ref, 1192 int what, int arg1, int arg2, Object obj) { 1193 //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2); 1194 AudioTrack track = (AudioTrack)((WeakReference)audiotrack_ref).get(); 1195 if (track == null) { 1196 return; 1197 } 1198 1199 if (track.mEventHandlerDelegate != null) { 1200 Message m = 1201 track.mEventHandlerDelegate.getHandler().obtainMessage(what, arg1, arg2, obj); 1202 track.mEventHandlerDelegate.getHandler().sendMessage(m); 1203 } 1204 1205 } 1206 1207 1208 //--------------------------------------------------------- 1209 // Native methods called from the Java side 1210 //-------------------- 1211 1212 private native final int native_setup(Object audiotrack_this, 1213 int streamType, int sampleRate, int nbChannels, int audioFormat, 1214 int buffSizeInBytes, int mode, int[] sessionId); 1215 1216 private native final void native_finalize(); 1217 1218 private native final void native_release(); 1219 1220 private native final void native_start(); 1221 1222 private native final void native_stop(); 1223 1224 private native final void native_pause(); 1225 1226 private native final void native_flush(); 1227 1228 private native final int native_write_byte(byte[] audioData, 1229 int offsetInBytes, int sizeInBytes, int format); 1230 1231 private native final int native_write_short(short[] audioData, 1232 int offsetInShorts, int sizeInShorts, int format); 1233 1234 private native final int native_reload_static(); 1235 1236 private native final 
int native_get_native_frame_count(); 1237 1238 private native final void native_setVolume(float leftVolume, float rightVolume); 1239 1240 private native final int native_set_playback_rate(int sampleRateInHz); 1241 private native final int native_get_playback_rate(); 1242 1243 private native final int native_set_marker_pos(int marker); 1244 private native final int native_get_marker_pos(); 1245 1246 private native final int native_set_pos_update_period(int updatePeriod); 1247 private native final int native_get_pos_update_period(); 1248 1249 private native final int native_set_position(int position); 1250 private native final int native_get_position(); 1251 1252 private native final int native_set_loop(int start, int end, int loopCount); 1253 1254 static private native final int native_get_output_sample_rate(int streamType); 1255 static private native final int native_get_min_buff_size( 1256 int sampleRateInHz, int channelConfig, int audioFormat); 1257 1258 private native final int native_get_session_id(); 1259 1260 private native final int native_attachAuxEffect(int effectId); 1261 private native final void native_setAuxEffectSendLevel(float level); 1262 1263 //--------------------------------------------------------- 1264 // Utility methods 1265 //------------------ 1266 1267 private static void logd(String msg) { 1268 Log.d(TAG, "[ android.media.AudioTrack ] " + msg); 1269 } 1270 1271 private static void loge(String msg) { 1272 Log.e(TAG, "[ android.media.AudioTrack ] " + msg); 1273 } 1274 1275} 1276