AudioTrack.java revision 4896cb59a443370252d230793ee030021f719821
1/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media;
18
19import java.lang.ref.WeakReference;
20
21import android.os.Handler;
22import android.os.Looper;
23import android.os.Message;
24import android.util.Log;
25
26
/**
 * The AudioTrack class manages and plays a single audio resource for Java applications.
 * It allows streaming of PCM audio buffers to the audio sink for playback. This is
 * achieved by "pushing" the data to the AudioTrack object using one of the
 * {@link #write(byte[], int, int)} and {@link #write(short[], int, int)} methods.
 *
 * <p>An AudioTrack instance can operate under two modes: static or streaming.<br>
 * In Streaming mode, the application writes a continuous stream of data to the AudioTrack, using
 * one of the {@code write()} methods. These are blocking and return when the data has been
 * transferred from the Java layer to the native layer and queued for playback. The streaming
 * mode is most useful when playing blocks of audio data that for instance are:
 *
 * <ul>
 *   <li>too big to fit in memory because of the duration of the sound to play,</li>
 *   <li>too big to fit in memory because of the characteristics of the audio data
 *         (high sampling rate, bits per sample ...)</li>
 *   <li>received or generated while previously queued audio is playing.</li>
 * </ul>
 *
 * The static mode should be chosen when dealing with short sounds that fit in memory and
 * that need to be played with the smallest latency possible. The static mode will
 * therefore be preferred for UI and game sounds that are played often, and with the
 * smallest overhead possible.
 *
 * <p>Upon creation, an AudioTrack object initializes its associated audio buffer.
 * The size of this buffer, specified during the construction, determines how long an AudioTrack
 * can play before running out of data.<br>
 * For an AudioTrack using the static mode, this size is the maximum size of the sound that can
 * be played from it.<br>
 * For the streaming mode, data will be written to the audio sink in chunks of
 * sizes less than or equal to the total buffer size.
 *
 * AudioTrack is not final and thus permits subclasses, but such use is not recommended.
 */
public class AudioTrack
{
    //---------------------------------------------------------
    // Constants
    //--------------------
    /** Minimum value for a channel volume (full attenuation, i.e. silence). */
    private static final float VOLUME_MIN = 0.0f;
    /** Maximum value for a channel volume (no attenuation). */
    private static final float VOLUME_MAX = 1.0f;

    /** Minimum value for sample rate, in Hz. */
    private static final int SAMPLE_RATE_HZ_MIN = 4000;
    /** Maximum value for sample rate, in Hz. */
    private static final int SAMPLE_RATE_HZ_MAX = 48000;

    /** indicates AudioTrack state is stopped */
    public static final int PLAYSTATE_STOPPED = 1;  // matches SL_PLAYSTATE_STOPPED
    /** indicates AudioTrack state is paused */
    public static final int PLAYSTATE_PAUSED  = 2;  // matches SL_PLAYSTATE_PAUSED
    /** indicates AudioTrack state is playing */
    public static final int PLAYSTATE_PLAYING = 3;  // matches SL_PLAYSTATE_PLAYING

    // keep these values in sync with android_media_AudioTrack.cpp
    /**
     * Creation mode where audio data is transferred from Java to the native layer
     * only once before the audio starts playing.
     */
    public static final int MODE_STATIC = 0;
    /**
     * Creation mode where audio data is streamed from Java to the native layer
     * as the audio is playing.
     */
    public static final int MODE_STREAM = 1;

    /**
     * State of an AudioTrack that was not successfully initialized upon creation.
     */
    public static final int STATE_UNINITIALIZED = 0;
    /**
     * State of an AudioTrack that is ready to be used.
     */
    public static final int STATE_INITIALIZED   = 1;
    /**
     * State of a successfully initialized AudioTrack that uses static data,
     * but that hasn't received that data yet.
     */
    public static final int STATE_NO_STATIC_DATA = 2;

    // Error codes:
    // to keep in sync with frameworks/base/core/jni/android_media_AudioTrack.cpp
    /**
     * Denotes a successful operation.
     */
    public  static final int SUCCESS                               = 0;
    /**
     * Denotes a generic operation failure.
     */
    public  static final int ERROR                                 = -1;
    /**
     * Denotes a failure due to the use of an invalid value.
     */
    public  static final int ERROR_BAD_VALUE                       = -2;
    /**
     * Denotes a failure due to the improper use of a method.
     */
    public  static final int ERROR_INVALID_OPERATION               = -3;

    // Internal setup failure codes surfaced by native_setup(); never exposed to callers.
    private static final int ERROR_NATIVESETUP_AUDIOSYSTEM         = -16;
    private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK  = -17;
    private static final int ERROR_NATIVESETUP_INVALIDFORMAT       = -18;
    private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE   = -19;
    private static final int ERROR_NATIVESETUP_NATIVEINITFAILED    = -20;

    // Events:
    // to keep in sync with frameworks/av/include/media/AudioTrack.h
    /**
     * Event id denotes when playback head has reached a previously set marker.
     */
    private static final int NATIVE_EVENT_MARKER  = 3;
    /**
     * Event id denotes when previously set update period has elapsed during playback.
     */
    private static final int NATIVE_EVENT_NEW_POS = 4;

    private final static String TAG = "android.media.AudioTrack";


    //--------------------------------------------------------------------------
    // Member variables
    //--------------------
    /**
     * Indicates the state of the AudioTrack instance.
     */
    private int mState = STATE_UNINITIALIZED;
    /**
     * Indicates the play state of the AudioTrack instance.
     */
    private int mPlayState = PLAYSTATE_STOPPED;
    /**
     * Lock to make sure mPlayState updates are reflecting the actual state of the object.
     */
    private final Object mPlayStateLock = new Object();
    /**
     * Sizes of the native audio buffer, in bytes and in frames respectively.
     */
    private int mNativeBufferSizeInBytes = 0;
    private int mNativeBufferSizeInFrames = 0;
    /**
     * Handler for events coming from the native code.
     */
    private NativeEventHandlerDelegate mEventHandlerDelegate;
    /**
     * Looper associated with the thread that creates the AudioTrack instance.
     */
    private final Looper mInitializationLooper;
    /**
     * The audio data source sampling rate in Hz.
     */
    private int mSampleRate; // initialized by all constructors
    /**
     * The number of audio output channels (1 is mono, 2 is stereo).
     */
    private int mChannelCount = 1;
    /**
     * The audio channel mask.
     */
    private int mChannels = AudioFormat.CHANNEL_OUT_MONO;

    /**
     * The type of the audio stream to play. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, {@link AudioManager#STREAM_NOTIFICATION}, and
     *   {@link AudioManager#STREAM_DTMF}.
     */
    private int mStreamType = AudioManager.STREAM_MUSIC;
    /**
     * The way audio is consumed by the audio sink, streaming or static.
     */
    private int mDataLoadMode = MODE_STREAM;
    /**
     * The current audio channel configuration (channel mask as passed by the app).
     */
    private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
    /**
     * The encoding of the audio samples.
     * @see AudioFormat#ENCODING_PCM_8BIT
     * @see AudioFormat#ENCODING_PCM_16BIT
     */
    private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
    /**
     * Audio session ID
     */
    private int mSessionId = AudioSystem.AUDIO_SESSION_ALLOCATE;


    //--------------------------------
    // Used exclusively by native code
    //--------------------
    /**
     * Accessed by native methods: provides access to C++ AudioTrack object.
     * NOTE(review): declared int, so this assumes 32-bit native pointers — must stay
     * in sync with the JNI layer's expectations.
     */
    @SuppressWarnings("unused")
    private int mNativeTrackInJavaObj;
    /**
     * Accessed by native methods: provides access to the JNI data (i.e. resources used by
     * the native AudioTrack object, but not stored in it).
     */
    @SuppressWarnings("unused")
    private int mJniData;
231
232
    //--------------------------------------------------------------------------
    // Constructor, Finalize
    //--------------------
    /**
     * Class constructor.
     * @param streamType the type of the audio stream. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
     * @param sampleRateInHz the initial source sample rate expressed in Hz.
     * @param channelConfig describes the configuration of the audio channels.
     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
     * @param audioFormat the format in which the audio data is represented.
     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
     *   {@link AudioFormat#ENCODING_PCM_8BIT}
     * @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
     *   read from for playback.
     *   If track's creation mode is {@link #MODE_STREAM}, you can write data into
     *   this buffer in chunks less than or equal to this size, and it is typical to use
     *   chunks of 1/2 of the total size to permit double-buffering.
     *   If the track's creation mode is {@link #MODE_STATIC},
     *   this is the maximum length sample, or audio clip, that can be played by this instance.
     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
     *   smaller than getMinBufferSize() will result in an initialization failure.
     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
     * @throws java.lang.IllegalArgumentException if any parameter is rejected by the
     *   session-aware constructor this delegates to.
     */
    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
            int bufferSizeInBytes, int mode)
    throws IllegalArgumentException {
        // Delegate to the session-aware constructor; AUDIO_SESSION_ALLOCATE asks the
        // native layer to allocate a fresh audio session ID for this track.
        this(streamType, sampleRateInHz, channelConfig, audioFormat,
                bufferSizeInBytes, mode, AudioSystem.AUDIO_SESSION_ALLOCATE);
    }
268
269    /**
270     * Class constructor with audio session. Use this constructor when the AudioTrack must be
271     * attached to a particular audio session. The primary use of the audio session ID is to
272     * associate audio effects to a particular instance of AudioTrack: if an audio session ID
273     * is provided when creating an AudioEffect, this effect will be applied only to audio tracks
274     * and media players in the same session and not to the output mix.
275     * When an AudioTrack is created without specifying a session, it will create its own session
276     * which can be retrieved by calling the {@link #getAudioSessionId()} method.
277     * If a non-zero session ID is provided, this AudioTrack will share effects attached to this
278     * session
279     * with all other media players or audio tracks in the same session, otherwise a new session
280     * will be created for this track if none is supplied.
281     * @param streamType the type of the audio stream. See
282     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
283     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
284     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
285     * @param sampleRateInHz the initial source sample rate expressed in Hz.
286     * @param channelConfig describes the configuration of the audio channels.
287     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
288     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
289     * @param audioFormat the format in which the audio data is represented.
290     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
291     *   {@link AudioFormat#ENCODING_PCM_8BIT}
292     * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
293     *   from for playback. If using the AudioTrack in streaming mode, you can write data into
294     *   this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
295     *   this is the maximum size of the sound that will be played for this instance.
296     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
297     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
298     *   smaller than getMinBufferSize() will result in an initialization failure.
299     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
300     * @param sessionId Id of audio session the AudioTrack must be attached to
301     * @throws java.lang.IllegalArgumentException
302     */
303    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
304            int bufferSizeInBytes, int mode, int sessionId)
305    throws IllegalArgumentException {
306        // mState already == STATE_UNINITIALIZED
307
308        // remember which looper is associated with the AudioTrack instantiation
309        Looper looper;
310        if ((looper = Looper.myLooper()) == null) {
311            looper = Looper.getMainLooper();
312        }
313        mInitializationLooper = looper;
314
315        audioParamCheck(streamType, sampleRateInHz, channelConfig, audioFormat, mode);
316
317        audioBuffSizeCheck(bufferSizeInBytes);
318
319        if (sessionId < 0) {
320            throw new IllegalArgumentException("Invalid audio session ID: "+sessionId);
321        }
322
323        int[] session = new int[1];
324        session[0] = sessionId;
325        // native initialization
326        int initResult = native_setup(new WeakReference<AudioTrack>(this),
327                mStreamType, mSampleRate, mChannels, mAudioFormat,
328                mNativeBufferSizeInBytes, mDataLoadMode, session);
329        if (initResult != SUCCESS) {
330            loge("Error code "+initResult+" when initializing AudioTrack.");
331            return; // with mState == STATE_UNINITIALIZED
332        }
333
334        mSessionId = session[0];
335
336        if (mDataLoadMode == MODE_STATIC) {
337            mState = STATE_NO_STATIC_DATA;
338        } else {
339            mState = STATE_INITIALIZED;
340        }
341    }
342
    // Mask of all the output channel positions supported by this implementation.
    // Any channel configuration passed to the constructor must be a subset of this mask.
    private static final int SUPPORTED_OUT_CHANNELS =
            AudioFormat.CHANNEL_OUT_FRONT_LEFT |
            AudioFormat.CHANNEL_OUT_FRONT_RIGHT |
            AudioFormat.CHANNEL_OUT_FRONT_CENTER |
            AudioFormat.CHANNEL_OUT_LOW_FREQUENCY |
            AudioFormat.CHANNEL_OUT_BACK_LEFT |
            AudioFormat.CHANNEL_OUT_BACK_RIGHT |
            AudioFormat.CHANNEL_OUT_BACK_CENTER;
352
353    // Convenience method for the constructor's parameter checks.
354    // This is where constructor IllegalArgumentException-s are thrown
355    // postconditions:
356    //    mStreamType is valid
357    //    mChannelCount is valid
358    //    mChannels is valid
359    //    mAudioFormat is valid
360    //    mSampleRate is valid
361    //    mDataLoadMode is valid
362    private void audioParamCheck(int streamType, int sampleRateInHz,
363                                 int channelConfig, int audioFormat, int mode) {
364
365        //--------------
366        // stream type
367        if( (streamType != AudioManager.STREAM_ALARM) && (streamType != AudioManager.STREAM_MUSIC)
368           && (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM)
369           && (streamType != AudioManager.STREAM_VOICE_CALL)
370           && (streamType != AudioManager.STREAM_NOTIFICATION)
371           && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)
372           && (streamType != AudioManager.STREAM_DTMF)) {
373            throw new IllegalArgumentException("Invalid stream type.");
374        }
375        mStreamType = streamType;
376
377        //--------------
378        // sample rate, note these values are subject to change
379        if ( (sampleRateInHz < 4000) || (sampleRateInHz > 48000) ) {
380            throw new IllegalArgumentException(sampleRateInHz
381                    + "Hz is not a supported sample rate.");
382        }
383        mSampleRate = sampleRateInHz;
384
385        //--------------
386        // channel config
387        mChannelConfiguration = channelConfig;
388
389        switch (channelConfig) {
390        case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
391        case AudioFormat.CHANNEL_OUT_MONO:
392        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
393            mChannelCount = 1;
394            mChannels = AudioFormat.CHANNEL_OUT_MONO;
395            break;
396        case AudioFormat.CHANNEL_OUT_STEREO:
397        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
398            mChannelCount = 2;
399            mChannels = AudioFormat.CHANNEL_OUT_STEREO;
400            break;
401        default:
402            if (!isMultichannelConfigSupported(channelConfig)) {
403                // input channel configuration features unsupported channels
404                throw new IllegalArgumentException("Unsupported channel configuration.");
405            }
406            mChannels = channelConfig;
407            mChannelCount = Integer.bitCount(channelConfig);
408        }
409
410        //--------------
411        // audio format
412        switch (audioFormat) {
413        case AudioFormat.ENCODING_DEFAULT:
414            mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
415            break;
416        case AudioFormat.ENCODING_PCM_16BIT:
417        case AudioFormat.ENCODING_PCM_8BIT:
418            mAudioFormat = audioFormat;
419            break;
420        default:
421            throw new IllegalArgumentException("Unsupported sample encoding."
422                + " Should be ENCODING_PCM_8BIT or ENCODING_PCM_16BIT.");
423        }
424
425        //--------------
426        // audio load mode
427        if ( (mode != MODE_STREAM) && (mode != MODE_STATIC) ) {
428            throw new IllegalArgumentException("Invalid mode.");
429        }
430        mDataLoadMode = mode;
431    }
432
433    /**
434     * Convenience method to check that the channel configuration (a.k.a channel mask) is supported
435     * @param channelConfig the mask to validate
436     * @return false if the AudioTrack can't be used with such a mask
437     */
438    private static boolean isMultichannelConfigSupported(int channelConfig) {
439        // check for unsupported channels
440        if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
441            loge("Channel configuration features unsupported channels");
442            return false;
443        }
444        // check for unsupported multichannel combinations:
445        // - FL/FR must be present
446        // - L/R channels must be paired (e.g. no single L channel)
447        final int frontPair =
448                AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
449        if ((channelConfig & frontPair) != frontPair) {
450                loge("Front channels must be present in multichannel configurations");
451                return false;
452        }
453        final int backPair =
454                AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT;
455        if ((channelConfig & backPair) != 0) {
456            if ((channelConfig & backPair) != backPair) {
457                loge("Rear channels can't be used independently");
458                return false;
459            }
460        }
461        return true;
462    }
463
464
465    // Convenience method for the constructor's audio buffer size check.
466    // preconditions:
467    //    mChannelCount is valid
468    //    mAudioFormat is valid
469    // postcondition:
470    //    mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
471    private void audioBuffSizeCheck(int audioBufferSize) {
472        // NB: this section is only valid with PCM data.
473        //     To update when supporting compressed formats
474        int frameSizeInBytes = mChannelCount
475                * (mAudioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
476        if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
477            throw new IllegalArgumentException("Invalid audio buffer size.");
478        }
479
480        mNativeBufferSizeInBytes = audioBufferSize;
481        mNativeBufferSizeInFrames = audioBufferSize / frameSizeInBytes;
482    }
483
484
485    /**
486     * Releases the native AudioTrack resources.
487     */
488    public void release() {
489        // even though native_release() stops the native AudioTrack, we need to stop
490        // AudioTrack subclasses too.
491        try {
492            stop();
493        } catch(IllegalStateException ise) {
494            // don't raise an exception, we're releasing the resources.
495        }
496        native_release();
497        mState = STATE_UNINITIALIZED;
498    }
499
500    @Override
501    protected void finalize() {
502        native_finalize();
503    }
504
505    //--------------------------------------------------------------------------
506    // Getters
507    //--------------------
508    /**
509     * Returns the minimum valid volume value. Volume values set under this one will
510     * be clamped at this value.
511     * @return the minimum volume expressed as a linear attenuation.
512     */
513    static public float getMinVolume() {
514        return VOLUME_MIN;
515    }
516
517    /**
518     * Returns the maximum valid volume value. Volume values set above this one will
519     * be clamped at this value.
520     * @return the maximum volume expressed as a linear attenuation.
521     */
522    static public float getMaxVolume() {
523        return VOLUME_MAX;
524    }
525
    /**
     * Returns the configured audio data sample rate in Hz
     * (the source rate passed at construction, not the playback rate).
     */
    public int getSampleRate() {
        return mSampleRate;
    }

    /**
     * Returns the current playback rate in Hz, as reported by the native layer.
     */
    public int getPlaybackRate() {
        return native_get_playback_rate();
    }

    /**
     * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
     * and {@link AudioFormat#ENCODING_PCM_8BIT}.
     */
    public int getAudioFormat() {
        return mAudioFormat;
    }

    /**
     * Returns the type of audio stream this AudioTrack is configured for.
     * Compare the result against {@link AudioManager#STREAM_VOICE_CALL},
     * {@link AudioManager#STREAM_SYSTEM}, {@link AudioManager#STREAM_RING},
     * {@link AudioManager#STREAM_MUSIC}, {@link AudioManager#STREAM_ALARM},
     * {@link AudioManager#STREAM_NOTIFICATION}, or {@link AudioManager#STREAM_DTMF}.
     */
    public int getStreamType() {
        return mStreamType;
    }
558
    /**
     * Returns the configured channel configuration (channel mask as passed by the app).
     * See {@link AudioFormat#CHANNEL_OUT_MONO}
     * and {@link AudioFormat#CHANNEL_OUT_STEREO}.
     */
    public int getChannelConfiguration() {
        return mChannelConfiguration;
    }

    /**
     * Returns the configured number of channels (1 for mono, 2 for stereo, ...).
     */
    public int getChannelCount() {
        return mChannelCount;
    }
574
    /**
     * Returns the state of the AudioTrack instance. This is useful after the
     * AudioTrack instance has been created to check if it was initialized
     * properly. This ensures that the appropriate resources have been acquired.
     * @see #STATE_INITIALIZED
     * @see #STATE_NO_STATIC_DATA
     * @see #STATE_UNINITIALIZED
     */
    public int getState() {
        // NOTE(review): mState is read here without synchronization, unlike
        // mPlayState below — presumably intentional; confirm against writers.
        return mState;
    }

    /**
     * Returns the playback state of the AudioTrack instance.
     * @see #PLAYSTATE_STOPPED
     * @see #PLAYSTATE_PAUSED
     * @see #PLAYSTATE_PLAYING
     */
    public int getPlayState() {
        // Read under mPlayStateLock so the value reflects the actual object state.
        synchronized (mPlayStateLock) {
            return mPlayState;
        }
    }
598
    /**
     *  Returns the "native frame count", derived from the bufferSizeInBytes specified at
     *  creation time and converted to frame units.
     *  If track's creation mode is {@link #MODE_STATIC},
     *  it is equal to the specified bufferSizeInBytes converted to frame units.
     *  If track's creation mode is {@link #MODE_STREAM},
     *  it is typically greater than or equal to the specified bufferSizeInBytes converted to frame
     *  units; it may be rounded up to a larger value if needed by the target device implementation.
     *  @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
     *  See {@link AudioManager#getProperty(String)} for key
     *  {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
     */
    @Deprecated
    protected int getNativeFrameCount() {
        return native_get_native_frame_count();
    }
615
    /**
     * Returns marker position expressed in frames.
     * @return marker position in wrapping frame units similar to {@link #getPlaybackHeadPosition},
     * or zero if marker is disabled.
     */
    public int getNotificationMarkerPosition() {
        return native_get_marker_pos();
    }

    /**
     * Returns the notification update period expressed in frames.
     * Zero means that no position update notifications are being delivered.
     */
    public int getPositionNotificationPeriod() {
        return native_get_pos_update_period();
    }
632
    /**
     * Returns the playback head position expressed in frames.
     * Though the "int" type is signed 32-bits, the value should be reinterpreted as if it is
     * unsigned 32-bits.  That is, the next position after 0x7FFFFFFF is (int) 0x80000000.
     * This is a continuously advancing counter.  It will wrap (overflow) periodically,
     * for example approximately once every 27:03:11 hours:minutes:seconds at 44.1 kHz.
     * It is reset to zero by flush(), reload(), and stop().
     */
    public int getPlaybackHeadPosition() {
        return native_get_position();
    }

    /**
     * Returns this track's estimated latency in milliseconds. This includes the latency due
     * to AudioTrack buffer size, AudioMixer (if any) and audio hardware driver.
     *
     * DO NOT UNHIDE. The existing approach for doing A/V sync has too many problems. We need
     * a better solution.
     * @hide
     */
    public int getLatency() {
        return native_get_latency();
    }
656
    /**
     *  Returns the output sample rate in Hz for the specified stream type,
     *  as reported by the native audio layer.
     */
    static public int getNativeOutputSampleRate(int streamType) {
        return native_get_output_sample_rate(streamType);
    }
663
664    /**
665     * Returns the minimum buffer size required for the successful creation of an AudioTrack
666     * object to be created in the {@link #MODE_STREAM} mode. Note that this size doesn't
667     * guarantee a smooth playback under load, and higher values should be chosen according to
668     * the expected frequency at which the buffer will be refilled with additional data to play.
669     * For example, if you intend to dynamically set the source sample rate of an AudioTrack
670     * to a higher value than the initial source sample rate, be sure to configure the buffer size
671     * based on the highest planned sample rate.
672     * @param sampleRateInHz the source sample rate expressed in Hz.
673     * @param channelConfig describes the configuration of the audio channels.
674     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
675     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
676     * @param audioFormat the format in which the audio data is represented.
677     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
678     *   {@link AudioFormat#ENCODING_PCM_8BIT}
679     * @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed,
680     *   or {@link #ERROR} if unable to query for output properties,
681     *   or the minimum buffer size expressed in bytes.
682     */
683    static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
684        int channelCount = 0;
685        switch(channelConfig) {
686        case AudioFormat.CHANNEL_OUT_MONO:
687        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
688            channelCount = 1;
689            break;
690        case AudioFormat.CHANNEL_OUT_STEREO:
691        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
692            channelCount = 2;
693            break;
694        default:
695            if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
696                // input channel configuration features unsupported channels
697                loge("getMinBufferSize(): Invalid channel configuration.");
698                return ERROR_BAD_VALUE;
699            } else {
700                channelCount = Integer.bitCount(channelConfig);
701            }
702        }
703
704        if ((audioFormat != AudioFormat.ENCODING_PCM_16BIT)
705            && (audioFormat != AudioFormat.ENCODING_PCM_8BIT)) {
706            loge("getMinBufferSize(): Invalid audio format.");
707            return ERROR_BAD_VALUE;
708        }
709
710        // sample rate, note these values are subject to change
711        if ( (sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX) ) {
712            loge("getMinBufferSize(): " + sampleRateInHz + " Hz is not a supported sample rate.");
713            return ERROR_BAD_VALUE;
714        }
715
716        int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
717        if (size <= 0) {
718            loge("getMinBufferSize(): error querying hardware");
719            return ERROR;
720        }
721        else {
722            return size;
723        }
724    }
725
726    /**
727     * Returns the audio session ID.
728     *
729     * @return the ID of the audio session this AudioTrack belongs to.
730     */
731    public int getAudioSessionId() {
732        return mSessionId;
733    }
734
735   /**
736    * Poll for a timestamp on demand.
737    *
738    * Use if you need to get the most recent timestamp outside of the event callback handler.
739    * Calling this method too often may be inefficient;
740    * if you need a high-resolution mapping between frame position and presentation time,
741    * consider implementing that at application level, based on low-resolution timestamps.
742    * The audio data at the returned position may either already have been
743    * presented, or may have not yet been presented but is committed to be presented.
744    * It is not possible to request the time corresponding to a particular position,
745    * or to request the (fractional) position corresponding to a particular time.
746    * If you need such features, consider implementing them at application level.
747    *
748    * @param timestamp a reference to a non-null AudioTimestamp instance allocated
749    *        and owned by caller.
750    * @return true if a timestamp is available, or false if no timestamp is available.
751    *         If a timestamp if available,
752    *         the AudioTimestamp instance is filled in with a position in frame units, together
753    *         with the estimated time when that frame was presented or is committed to
754    *         be presented.
755    *         In the case that no timestamp is available, any supplied instance is left unaltered.
756    */
757    public boolean getTimestamp(AudioTimestamp timestamp)
758    {
759        if (timestamp == null) {
760            throw new IllegalArgumentException();
761        }
762        // It's unfortunate, but we have to either create garbage every time or use synchronized
763        long[] longArray = new long[2];
764        int ret = native_get_timestamp(longArray);
765        if (ret != SUCCESS) {
766            return false;
767        }
768        timestamp.framePosition = longArray[0];
769        timestamp.nanoTime = longArray[1];
770        return true;
771    }
772
773
774    //--------------------------------------------------------------------------
775    // Initialization / configuration
776    //--------------------
777    /**
778     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
779     * for each periodic playback head position update.
780     * Notifications will be received in the same thread as the one in which the AudioTrack
781     * instance was created.
782     * @param listener
783     */
784    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) {
785        setPlaybackPositionUpdateListener(listener, null);
786    }
787
788    /**
789     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
790     * for each periodic playback head position update.
791     * Use this method to receive AudioTrack events in the Handler associated with another
792     * thread than the one in which you created the AudioTrack instance.
793     * @param listener
794     * @param handler the Handler that will receive the event notification messages.
795     */
796    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
797                                                    Handler handler) {
798        if (listener != null) {
799            mEventHandlerDelegate = new NativeEventHandlerDelegate(this, listener, handler);
800        } else {
801            mEventHandlerDelegate = null;
802        }
803    }
804
805
806
807     /**
808     * Sets the specified left/right output volume values on the AudioTrack. Values are clamped
809     * to the ({@link #getMinVolume()}, {@link #getMaxVolume()}) interval if outside this range.
810     * @param leftVolume output attenuation for the left channel. A value of 0.0f is silence,
811     *      a value of 1.0f is no attenuation.
812     * @param rightVolume output attenuation for the right channel
813     * @return error code or success, see {@link #SUCCESS},
814     *    {@link #ERROR_INVALID_OPERATION}
815     */
816    public int setStereoVolume(float leftVolume, float rightVolume) {
817        if (mState == STATE_UNINITIALIZED) {
818            return ERROR_INVALID_OPERATION;
819        }
820
821        // clamp the volumes
822        if (leftVolume < getMinVolume()) {
823            leftVolume = getMinVolume();
824        }
825        if (leftVolume > getMaxVolume()) {
826            leftVolume = getMaxVolume();
827        }
828        if (rightVolume < getMinVolume()) {
829            rightVolume = getMinVolume();
830        }
831        if (rightVolume > getMaxVolume()) {
832            rightVolume = getMaxVolume();
833        }
834
835        native_setVolume(leftVolume, rightVolume);
836
837        return SUCCESS;
838    }
839
840
841    /**
842     * Similar, except set volume of all channels to same value.
843     * @hide
844     */
845    public int setVolume(float volume) {
846        return setStereoVolume(volume, volume);
847    }
848
849
850    /**
851     * Sets the playback sample rate for this track. This sets the sampling rate at which
852     * the audio data will be consumed and played back
853     * (as set by the sampleRateInHz parameter in the
854     * {@link #AudioTrack(int, int, int, int, int, int)} constructor),
855     * not the original sampling rate of the
856     * content. For example, setting it to half the sample rate of the content will cause the
857     * playback to last twice as long, but will also result in a pitch shift down by one octave.
858     * The valid sample rate range is from 1 Hz to twice the value returned by
859     * {@link #getNativeOutputSampleRate(int)}.
860     * @param sampleRateInHz the sample rate expressed in Hz
861     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
862     *    {@link #ERROR_INVALID_OPERATION}
863     */
864    public int setPlaybackRate(int sampleRateInHz) {
865        if (mState != STATE_INITIALIZED) {
866            return ERROR_INVALID_OPERATION;
867        }
868        if (sampleRateInHz <= 0) {
869            return ERROR_BAD_VALUE;
870        }
871        return native_set_playback_rate(sampleRateInHz);
872    }
873
874
875    /**
876     * Sets the position of the notification marker.  At most one marker can be active.
877     * @param markerInFrames marker position in wrapping frame units similar to
878     * {@link #getPlaybackHeadPosition}, or zero to disable the marker.
879     * To set a marker at a position which would appear as zero due to wraparound,
880     * a workaround is to use a non-zero position near zero, such as -1 or 1.
881     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
882     *  {@link #ERROR_INVALID_OPERATION}
883     */
884    public int setNotificationMarkerPosition(int markerInFrames) {
885        if (mState == STATE_UNINITIALIZED) {
886            return ERROR_INVALID_OPERATION;
887        }
888        return native_set_marker_pos(markerInFrames);
889    }
890
891
892    /**
893     * Sets the period for the periodic notification event.
894     * @param periodInFrames update period expressed in frames
895     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
896     */
897    public int setPositionNotificationPeriod(int periodInFrames) {
898        if (mState == STATE_UNINITIALIZED) {
899            return ERROR_INVALID_OPERATION;
900        }
901        return native_set_pos_update_period(periodInFrames);
902    }
903
904
905    /**
906     * Sets the playback head position.
907     * The track must be stopped or paused for the position to be changed,
908     * and must use the {@link #MODE_STATIC} mode.
909     * @param positionInFrames playback head position expressed in frames
910     * Zero corresponds to start of buffer.
911     * The position must not be greater than the buffer size in frames, or negative.
912     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
913     *    {@link #ERROR_INVALID_OPERATION}
914     */
915    public int setPlaybackHeadPosition(int positionInFrames) {
916        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
917                getPlayState() == PLAYSTATE_PLAYING) {
918            return ERROR_INVALID_OPERATION;
919        }
920        if (!(0 <= positionInFrames && positionInFrames <= mNativeBufferSizeInFrames)) {
921            return ERROR_BAD_VALUE;
922        }
923        return native_set_position(positionInFrames);
924    }
925
926    /**
927     * Sets the loop points and the loop count. The loop can be infinite.
928     * Similarly to setPlaybackHeadPosition,
929     * the track must be stopped or paused for the loop points to be changed,
930     * and must use the {@link #MODE_STATIC} mode.
931     * @param startInFrames loop start marker expressed in frames
932     * Zero corresponds to start of buffer.
933     * The start marker must not be greater than or equal to the buffer size in frames, or negative.
934     * @param endInFrames loop end marker expressed in frames
935     * The total buffer size in frames corresponds to end of buffer.
936     * The end marker must not be greater than the buffer size in frames.
937     * For looping, the end marker must not be less than or equal to the start marker,
938     * but to disable looping
939     * it is permitted for start marker, end marker, and loop count to all be 0.
940     * @param loopCount the number of times the loop is looped.
941     *    A value of -1 means infinite looping, and 0 disables looping.
942     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
943     *    {@link #ERROR_INVALID_OPERATION}
944     */
945    public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
946        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
947                getPlayState() == PLAYSTATE_PLAYING) {
948            return ERROR_INVALID_OPERATION;
949        }
950        if (loopCount == 0) {
951            ;   // explicitly allowed as an exception to the loop region range check
952        } else if (!(0 <= startInFrames && startInFrames < mNativeBufferSizeInFrames &&
953                startInFrames < endInFrames && endInFrames <= mNativeBufferSizeInFrames)) {
954            return ERROR_BAD_VALUE;
955        }
956        return native_set_loop(startInFrames, endInFrames, loopCount);
957    }
958
959    /**
960     * Sets the initialization state of the instance. This method was originally intended to be used
961     * in an AudioTrack subclass constructor to set a subclass-specific post-initialization state.
962     * However, subclasses of AudioTrack are no longer recommended, so this method is obsolete.
963     * @param state the state of the AudioTrack instance
964     * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
965     */
966    @Deprecated
967    protected void setState(int state) {
968        mState = state;
969    }
970
971
972    //---------------------------------------------------------
973    // Transport control methods
974    //--------------------
975    /**
976     * Starts playing an AudioTrack.
977     * If track's creation mode is {@link #MODE_STATIC}, you must have called write() prior.
978     *
979     * @throws IllegalStateException
980     */
981    public void play()
982    throws IllegalStateException {
983        if (mState != STATE_INITIALIZED) {
984            throw new IllegalStateException("play() called on uninitialized AudioTrack.");
985        }
986
987        synchronized(mPlayStateLock) {
988            native_start();
989            mPlayState = PLAYSTATE_PLAYING;
990        }
991    }
992
993    /**
994     * Stops playing the audio data.
995     * When used on an instance created in {@link #MODE_STREAM} mode, audio will stop playing
996     * after the last buffer that was written has been played. For an immediate stop, use
997     * {@link #pause()}, followed by {@link #flush()} to discard audio data that hasn't been played
998     * back yet.
999     * @throws IllegalStateException
1000     */
1001    public void stop()
1002    throws IllegalStateException {
1003        if (mState != STATE_INITIALIZED) {
1004            throw new IllegalStateException("stop() called on uninitialized AudioTrack.");
1005        }
1006
1007        // stop playing
1008        synchronized(mPlayStateLock) {
1009            native_stop();
1010            mPlayState = PLAYSTATE_STOPPED;
1011        }
1012    }
1013
1014    /**
1015     * Pauses the playback of the audio data. Data that has not been played
1016     * back will not be discarded. Subsequent calls to {@link #play} will play
1017     * this data back. See {@link #flush()} to discard this data.
1018     *
1019     * @throws IllegalStateException
1020     */
1021    public void pause()
1022    throws IllegalStateException {
1023        if (mState != STATE_INITIALIZED) {
1024            throw new IllegalStateException("pause() called on uninitialized AudioTrack.");
1025        }
1026        //logd("pause()");
1027
1028        // pause playback
1029        synchronized(mPlayStateLock) {
1030            native_pause();
1031            mPlayState = PLAYSTATE_PAUSED;
1032        }
1033    }
1034
1035
1036    //---------------------------------------------------------
1037    // Audio data supply
1038    //--------------------
1039
1040    /**
1041     * Flushes the audio data currently queued for playback. Any data that has
1042     * not been played back will be discarded.  No-op if not stopped or paused,
1043     * or if the track's creation mode is not {@link #MODE_STREAM}.
1044     */
1045    public void flush() {
1046        if (mState == STATE_INITIALIZED) {
1047            // flush the data in native layer
1048            native_flush();
1049        }
1050
1051    }
1052
1053    /**
1054     * Writes the audio data to the audio sink for playback (streaming mode),
1055     * or copies audio data for later playback (static buffer mode).
1056     * In streaming mode, will block until all data has been written to the audio sink.
1057     * In static buffer mode, copies the data to the buffer starting at offset 0.
1058     * Note that the actual playback of this data might occur after this function
1059     * returns. This function is thread safe with respect to {@link #stop} calls,
1060     * in which case all of the specified data might not be written to the audio sink.
1061     *
1062     * @param audioData the array that holds the data to play.
1063     * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
1064     *    starts.
1065     * @param sizeInBytes the number of bytes to read in audioData after the offset.
1066     * @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
1067     *    if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1068     *    the parameters don't resolve to valid data and indexes.
1069     */
1070
1071    public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) {
1072
1073        if (mState == STATE_UNINITIALIZED) {
1074            return ERROR_INVALID_OPERATION;
1075        }
1076
1077        if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
1078                || (offsetInBytes + sizeInBytes < 0)    // detect integer overflow
1079                || (offsetInBytes + sizeInBytes > audioData.length)) {
1080            return ERROR_BAD_VALUE;
1081        }
1082
1083        int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat);
1084
1085        if ((mDataLoadMode == MODE_STATIC)
1086                && (mState == STATE_NO_STATIC_DATA)
1087                && (ret > 0)) {
1088            // benign race with respect to other APIs that read mState
1089            mState = STATE_INITIALIZED;
1090        }
1091
1092        return ret;
1093    }
1094
1095
1096    /**
1097     * Writes the audio data to the audio sink for playback (streaming mode),
1098     * or copies audio data for later playback (static buffer mode).
1099     * In streaming mode, will block until all data has been written to the audio sink.
1100     * In static buffer mode, copies the data to the buffer starting at offset 0.
1101     * Note that the actual playback of this data might occur after this function
1102     * returns. This function is thread safe with respect to {@link #stop} calls,
1103     * in which case all of the specified data might not be written to the audio sink.
1104     *
1105     * @param audioData the array that holds the data to play.
1106     * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
1107     *     starts.
1108     * @param sizeInShorts the number of shorts to read in audioData after the offset.
1109     * @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
1110      *    if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1111      *    the parameters don't resolve to valid data and indexes.
1112     */
1113
1114    public int write(short[] audioData, int offsetInShorts, int sizeInShorts) {
1115
1116        if (mState == STATE_UNINITIALIZED) {
1117            return ERROR_INVALID_OPERATION;
1118        }
1119
1120        if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
1121                || (offsetInShorts + sizeInShorts < 0)  // detect integer overflow
1122                || (offsetInShorts + sizeInShorts > audioData.length)) {
1123            return ERROR_BAD_VALUE;
1124        }
1125
1126        int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat);
1127
1128        if ((mDataLoadMode == MODE_STATIC)
1129                && (mState == STATE_NO_STATIC_DATA)
1130                && (ret > 0)) {
1131            // benign race with respect to other APIs that read mState
1132            mState = STATE_INITIALIZED;
1133        }
1134
1135        return ret;
1136    }
1137
1138
1139    /**
1140     * Notifies the native resource to reuse the audio data already loaded in the native
1141     * layer, that is to rewind to start of buffer.
1142     * The track's creation mode must be {@link #MODE_STATIC}.
1143     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1144     *  {@link #ERROR_INVALID_OPERATION}
1145     */
1146    public int reloadStaticData() {
1147        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED) {
1148            return ERROR_INVALID_OPERATION;
1149        }
1150        return native_reload_static();
1151    }
1152
1153    //--------------------------------------------------------------------------
1154    // Audio effects management
1155    //--------------------
1156
1157    /**
1158     * Attaches an auxiliary effect to the audio track. A typical auxiliary
1159     * effect is a reverberation effect which can be applied on any sound source
1160     * that directs a certain amount of its energy to this effect. This amount
1161     * is defined by setAuxEffectSendLevel().
1162     * {@see #setAuxEffectSendLevel(float)}.
1163     * <p>After creating an auxiliary effect (e.g.
1164     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
1165     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling
1166     * this method to attach the audio track to the effect.
1167     * <p>To detach the effect from the audio track, call this method with a
1168     * null effect id.
1169     *
1170     * @param effectId system wide unique id of the effect to attach
1171     * @return error code or success, see {@link #SUCCESS},
1172     *    {@link #ERROR_INVALID_OPERATION}, {@link #ERROR_BAD_VALUE}
1173     */
1174    public int attachAuxEffect(int effectId) {
1175        if (mState == STATE_UNINITIALIZED) {
1176            return ERROR_INVALID_OPERATION;
1177        }
1178        return native_attachAuxEffect(effectId);
1179    }
1180
1181    /**
1182     * Sets the send level of the audio track to the attached auxiliary effect
1183     * {@link #attachAuxEffect(int)}.  The level value range is 0.0f to 1.0f.
1184     * Values are clamped to the (0.0f, 1.0f) interval if outside this range.
1185     * <p>By default the send level is 0.0f, so even if an effect is attached to the player
1186     * this method must be called for the effect to be applied.
1187     * <p>Note that the passed level value is a raw scalar. UI controls should be scaled
1188     * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
1189     * so an appropriate conversion from linear UI input x to level is:
1190     * x == 0 -&gt; level = 0
1191     * 0 &lt; x &lt;= R -&gt; level = 10^(72*(x-R)/20/R)
1192     *
1193     * @param level send level scalar
1194     * @return error code or success, see {@link #SUCCESS},
1195     *    {@link #ERROR_INVALID_OPERATION}
1196     */
1197    public int setAuxEffectSendLevel(float level) {
1198        if (mState == STATE_UNINITIALIZED) {
1199            return ERROR_INVALID_OPERATION;
1200        }
1201        // clamp the level
1202        if (level < getMinVolume()) {
1203            level = getMinVolume();
1204        }
1205        if (level > getMaxVolume()) {
1206            level = getMaxVolume();
1207        }
1208        native_setAuxEffectSendLevel(level);
1209        return SUCCESS;
1210    }
1211
1212    //---------------------------------------------------------
1213    // Interface definitions
1214    //--------------------
    /**
     * Interface definition for a callback to be invoked when the playback head position of
     * an AudioTrack has reached a notification marker or has increased by a certain period.
     */
    public interface OnPlaybackPositionUpdateListener  {
        /**
         * Called on the listener to notify it that the previously set marker has been reached
         * by the playback head.
         * @param track the AudioTrack whose marker was reached
         */
        void onMarkerReached(AudioTrack track);

        /**
         * Called on the listener to periodically notify it that the playback head has reached
         * a multiple of the notification period.
         * @param track the AudioTrack whose playback head advanced by one period
         */
        void onPeriodicNotification(AudioTrack track);
    }
1232
1233
1234    //---------------------------------------------------------
1235    // Inner classes
1236    //--------------------
1237    /**
1238     * Helper class to handle the forwarding of native events to the appropriate listener
1239     * (potentially) handled in a different thread
1240     */
1241    private class NativeEventHandlerDelegate {
1242        private final Handler mHandler;
1243
1244        NativeEventHandlerDelegate(final AudioTrack track,
1245                                   final OnPlaybackPositionUpdateListener listener,
1246                                   Handler handler) {
1247            // find the looper for our new event handler
1248            Looper looper;
1249            if (handler != null) {
1250                looper = handler.getLooper();
1251            } else {
1252                // no given handler, use the looper the AudioTrack was created in
1253                looper = mInitializationLooper;
1254            }
1255
1256            // construct the event handler with this looper
1257            if (looper != null) {
1258                // implement the event handler delegate
1259                mHandler = new Handler(looper) {
1260                    @Override
1261                    public void handleMessage(Message msg) {
1262                        if (track == null) {
1263                            return;
1264                        }
1265                        switch(msg.what) {
1266                        case NATIVE_EVENT_MARKER:
1267                            if (listener != null) {
1268                                listener.onMarkerReached(track);
1269                            }
1270                            break;
1271                        case NATIVE_EVENT_NEW_POS:
1272                            if (listener != null) {
1273                                listener.onPeriodicNotification(track);
1274                            }
1275                            break;
1276                        default:
1277                            loge("Unknown native event type: " + msg.what);
1278                            break;
1279                        }
1280                    }
1281                };
1282            } else {
1283                mHandler = null;
1284            }
1285        }
1286
1287        Handler getHandler() {
1288            return mHandler;
1289        }
1290    }
1291
1292
1293    //---------------------------------------------------------
1294    // Java methods called from the native side
1295    //--------------------
1296    @SuppressWarnings("unused")
1297    private static void postEventFromNative(Object audiotrack_ref,
1298            int what, int arg1, int arg2, Object obj) {
1299        //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
1300        AudioTrack track = (AudioTrack)((WeakReference)audiotrack_ref).get();
1301        if (track == null) {
1302            return;
1303        }
1304
1305        NativeEventHandlerDelegate delegate = track.mEventHandlerDelegate;
1306        if (delegate != null) {
1307            Handler handler = delegate.getHandler();
1308            if (handler != null) {
1309                Message m = handler.obtainMessage(what, arg1, arg2, obj);
1310                handler.sendMessage(m);
1311            }
1312        }
1313
1314    }
1315
1316
1317    //---------------------------------------------------------
1318    // Native methods called from the Java side
1319    //--------------------
1320
    // Creates the underlying native AudioTrack; on success, sessionId[0] receives the
    // audio session the track was attached to. Returns a status code.
    private native final int native_setup(Object audiotrack_this,
            int streamType, int sampleRate, int channelMask, int audioFormat,
            int buffSizeInBytes, int mode, int[] sessionId);

    private native final void native_finalize();

    private native final void native_release();

    // Transport control.
    private native final void native_start();

    private native final void native_stop();

    private native final void native_pause();

    private native final void native_flush();

    // Data supply; return the number of elements written or a negative error code.
    private native final int native_write_byte(byte[] audioData,
                                               int offsetInBytes, int sizeInBytes, int format);

    private native final int native_write_short(short[] audioData,
                                                int offsetInShorts, int sizeInShorts, int format);

    private native final int native_reload_static();

    private native final int native_get_native_frame_count();

    private native final void native_setVolume(float leftVolume, float rightVolume);

    // Playback parameters and position/marker/period accessors.
    private native final int native_set_playback_rate(int sampleRateInHz);
    private native final int native_get_playback_rate();

    private native final int native_set_marker_pos(int marker);
    private native final int native_get_marker_pos();

    private native final int native_set_pos_update_period(int updatePeriod);
    private native final int native_get_pos_update_period();

    private native final int native_set_position(int position);
    private native final int native_get_position();

    private native final int native_get_latency();

    // longArray must be a non-null array of length >= 2
    // [0] is assigned the frame position
    // [1] is assigned the time in CLOCK_MONOTONIC nanoseconds
    private native final int native_get_timestamp(long[] longArray);

    private native final int native_set_loop(int start, int end, int loopCount);

    // Static queries used by getNativeOutputSampleRate() and getMinBufferSize().
    static private native final int native_get_output_sample_rate(int streamType);
    static private native final int native_get_min_buff_size(
            int sampleRateInHz, int channelConfig, int audioFormat);

    // Auxiliary effect routing.
    private native final int native_attachAuxEffect(int effectId);
    private native final void native_setAuxEffectSendLevel(float level);
1376
1377    //---------------------------------------------------------
1378    // Utility methods
1379    //------------------
1380
    // Logs a debug message under this class's tag.
    private static void logd(String msg) {
        Log.d(TAG, msg);
    }
1384
    // Logs an error message under this class's tag.
    private static void loge(String msg) {
        Log.e(TAG, msg);
    }
1388
1389}
1390