AudioTrack.java revision 1af30c7ac480e5d335f267a3ac3b2e6c748ce240
1/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media;
18
19import java.lang.annotation.Retention;
20import java.lang.annotation.RetentionPolicy;
21import java.lang.ref.WeakReference;
22import java.nio.ByteBuffer;
23import java.nio.NioUtils;
24
25import android.annotation.IntDef;
26import android.app.ActivityThread;
27import android.app.AppOpsManager;
28import android.content.Context;
29import android.os.Handler;
30import android.os.IBinder;
31import android.os.Looper;
32import android.os.Message;
33import android.os.Process;
34import android.os.RemoteException;
35import android.os.ServiceManager;
36import android.util.Log;
37
38import com.android.internal.app.IAppOpsService;
39
40
41/**
42 * The AudioTrack class manages and plays a single audio resource for Java applications.
43 * It allows streaming of PCM audio buffers to the audio sink for playback. This is
44 * achieved by "pushing" the data to the AudioTrack object using one of the
45 *  {@link #write(byte[], int, int)} and {@link #write(short[], int, int)} methods.
46 *
47 * <p>An AudioTrack instance can operate under two modes: static or streaming.<br>
48 * In Streaming mode, the application writes a continuous stream of data to the AudioTrack, using
49 * one of the {@code write()} methods. These are blocking and return when the data has been
50 * transferred from the Java layer to the native layer and queued for playback. The streaming
51 * mode is most useful when playing blocks of audio data that for instance are:
52 *
53 * <ul>
54 *   <li>too big to fit in memory because of the duration of the sound to play,</li>
55 *   <li>too big to fit in memory because of the characteristics of the audio data
56 *         (high sampling rate, bits per sample ...)</li>
57 *   <li>received or generated while previously queued audio is playing.</li>
58 * </ul>
59 *
60 * The static mode should be chosen when dealing with short sounds that fit in memory and
61 * that need to be played with the smallest latency possible. The static mode will
62 * therefore be preferred for UI and game sounds that are played often, and with the
63 * smallest overhead possible.
64 *
65 * <p>Upon creation, an AudioTrack object initializes its associated audio buffer.
66 * The size of this buffer, specified during the construction, determines how long an AudioTrack
67 * can play before running out of data.<br>
68 * For an AudioTrack using the static mode, this size is the maximum size of the sound that can
69 * be played from it.<br>
70 * For the streaming mode, data will be written to the audio sink in chunks of
71 * sizes less than or equal to the total buffer size.
72 *
73 * AudioTrack is not final and thus permits subclasses, but such use is not recommended.
74 */
75public class AudioTrack
76{
    //---------------------------------------------------------
    // Constants
    //--------------------
    /** Minimum value for a channel volume */
    private static final float VOLUME_MIN = 0.0f;
    /** Maximum value for a channel volume */
    private static final float VOLUME_MAX = 1.0f;

    /** Minimum value for sample rate */
    private static final int SAMPLE_RATE_HZ_MIN = 4000;
    /** Maximum value for sample rate */
    private static final int SAMPLE_RATE_HZ_MAX = 48000;

    /** indicates AudioTrack state is stopped */
    public static final int PLAYSTATE_STOPPED = 1;  // matches SL_PLAYSTATE_STOPPED
    /** indicates AudioTrack state is paused */
    public static final int PLAYSTATE_PAUSED  = 2;  // matches SL_PLAYSTATE_PAUSED
    /** indicates AudioTrack state is playing */
    public static final int PLAYSTATE_PLAYING = 3;  // matches SL_PLAYSTATE_PLAYING

    // keep these values in sync with android_media_AudioTrack.cpp
    /**
     * Creation mode where audio data is transferred from Java to the native layer
     * only once before the audio starts playing.
     */
    public static final int MODE_STATIC = 0;
    /**
     * Creation mode where audio data is streamed from Java to the native layer
     * as the audio is playing.
     */
    public static final int MODE_STREAM = 1;

    /**
     * State of an AudioTrack that was not successfully initialized upon creation.
     */
    public static final int STATE_UNINITIALIZED = 0;
    /**
     * State of an AudioTrack that is ready to be used.
     */
    public static final int STATE_INITIALIZED   = 1;
    /**
     * State of a successfully initialized AudioTrack that uses static data,
     * but that hasn't received that data yet.
     */
    public static final int STATE_NO_STATIC_DATA = 2;

    // Error codes:
    // to keep in sync with frameworks/base/core/jni/android_media_AudioTrack.cpp
    /**
     * Denotes a successful operation.
     */
    public  static final int SUCCESS                               = 0;
    /**
     * Denotes a generic operation failure.
     */
    public  static final int ERROR                                 = -1;
    /**
     * Denotes a failure due to the use of an invalid value.
     */
    public  static final int ERROR_BAD_VALUE                       = -2;
    /**
     * Denotes a failure due to the improper use of a method.
     */
    public  static final int ERROR_INVALID_OPERATION               = -3;

    // Internal setup failure codes, surfaced only through the constructor log message;
    // values mirror the native layer (see the sync note above).
    private static final int ERROR_NATIVESETUP_AUDIOSYSTEM         = -16;
    private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK  = -17;
    private static final int ERROR_NATIVESETUP_INVALIDFORMAT       = -18;
    private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE   = -19;
    private static final int ERROR_NATIVESETUP_NATIVEINITFAILED    = -20;

    // Events:
    // to keep in sync with frameworks/av/include/media/AudioTrack.h
    /**
     * Event id denotes when playback head has reached a previously set marker.
     */
    private static final int NATIVE_EVENT_MARKER  = 3;
    /**
     * Event id denotes when previously set update period has elapsed during playback.
     */
    private static final int NATIVE_EVENT_NEW_POS = 4;

    private final static String TAG = "android.media.AudioTrack";


    /** @hide */
    @IntDef({
        WRITE_BLOCKING,
        WRITE_NON_BLOCKING
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface WriteMode {}

    /**
     * @hide CANDIDATE FOR PUBLIC API
     * The write mode indicating the write operation will block until all data has been written,
     * to be used in {@link #write(ByteBuffer, int, int, int)}.
     */
    public final static int WRITE_BLOCKING = 0;
    /**
     * @hide CANDIDATE FOR PUBLIC API
     * The write mode indicating the write operation will return immediately after
     * queuing as much audio data for playback as possible without blocking, to be used in
     * {@link #write(ByteBuffer, int, int, int)}.
     */
    public final static int WRITE_NON_BLOCKING = 1;
183
    //--------------------------------------------------------------------------
    // Member variables
    //--------------------
    /**
     * Indicates the state of the AudioTrack instance.
     */
    private int mState = STATE_UNINITIALIZED;
    /**
     * Indicates the play state of the AudioTrack instance.
     */
    private int mPlayState = PLAYSTATE_STOPPED;
    /**
     * Lock to make sure mPlayState updates are reflecting the actual state of the object.
     */
    private final Object mPlayStateLock = new Object();
    /**
     * Sizes of the native audio buffer.
     */
    // byte size validated by audioBuffSizeCheck(): positive multiple of the frame size
    private int mNativeBufferSizeInBytes = 0;
    private int mNativeBufferSizeInFrames = 0;
    /**
     * Handler for events coming from the native code.
     */
    private NativeEventHandlerDelegate mEventHandlerDelegate;
    /**
     * Looper associated with the thread that creates the AudioTrack instance.
     */
    // falls back to the main looper when the creating thread has none (see constructor)
    private final Looper mInitializationLooper;
    /**
     * The audio data source sampling rate in Hz.
     */
    private int mSampleRate; // initialized by all constructors
    /**
     * The number of audio output channels (1 is mono, 2 is stereo).
     */
    private int mChannelCount = 1;
    /**
     * The audio channel mask.
     */
    private int mChannels = AudioFormat.CHANNEL_OUT_MONO;

    /**
     * The type of the audio stream to play. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, {@link AudioManager#STREAM_NOTIFICATION}, and
     *   {@link AudioManager#STREAM_DTMF}.
     */
    private int mStreamType = AudioManager.STREAM_MUSIC;
    /**
     * The way audio is consumed by the audio sink, streaming or static.
     */
    private int mDataLoadMode = MODE_STREAM;
    /**
     * The current audio channel configuration.
     */
    // the raw mask passed by the caller; mChannels holds the normalized mask
    private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
    /**
     * The encoding of the audio samples.
     * @see AudioFormat#ENCODING_PCM_8BIT
     * @see AudioFormat#ENCODING_PCM_16BIT
     */
    private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
    /**
     * Audio session ID
     */
    // overwritten with the actual session id returned by native_setup()
    private int mSessionId = AudioSystem.AUDIO_SESSION_ALLOCATE;
    /**
     * Reference to the app-ops service.
     */
    private final IAppOpsService mAppOps;

    //--------------------------------
    // Used exclusively by native code
    //--------------------
    /**
     * Accessed by native methods: provides access to C++ AudioTrack object.
     */
    @SuppressWarnings("unused")
    private long mNativeTrackInJavaObj;
    /**
     * Accessed by native methods: provides access to the JNI data (i.e. resources used by
     * the native AudioTrack object, but not stored in it).
     */
    @SuppressWarnings("unused")
    private long mJniData;
271
272    //--------------------------------------------------------------------------
273    // Constructor, Finalize
274    //--------------------
    /**
     * Class constructor.
     * @param streamType the type of the audio stream. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
     * @param sampleRateInHz the initial source sample rate expressed in Hz.
     * @param channelConfig describes the configuration of the audio channels.
     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
     * @param audioFormat the format in which the audio data is represented.
     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
     *   {@link AudioFormat#ENCODING_PCM_8BIT}
     * @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
     *   read from for playback.
     *   If track's creation mode is {@link #MODE_STREAM}, you can write data into
     *   this buffer in chunks less than or equal to this size, and it is typical to use
     *   chunks of 1/2 of the total size to permit double-buffering.
     *   If the track's creation mode is {@link #MODE_STATIC},
     *   this is the maximum length sample, or audio clip, that can be played by this instance.
     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
     *   smaller than getMinBufferSize() will result in an initialization failure.
     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
     * @throws java.lang.IllegalArgumentException
     */
    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
            int bufferSizeInBytes, int mode)
    throws IllegalArgumentException {
        // delegate to the session-aware constructor, letting the system allocate a new session
        this(streamType, sampleRateInHz, channelConfig, audioFormat,
                bufferSizeInBytes, mode, AudioSystem.AUDIO_SESSION_ALLOCATE);
    }
307
    /**
     * Class constructor with audio session. Use this constructor when the AudioTrack must be
     * attached to a particular audio session. The primary use of the audio session ID is to
     * associate audio effects to a particular instance of AudioTrack: if an audio session ID
     * is provided when creating an AudioEffect, this effect will be applied only to audio tracks
     * and media players in the same session and not to the output mix.
     * When an AudioTrack is created without specifying a session, it will create its own session
     * which can be retrieved by calling the {@link #getAudioSessionId()} method.
     * If a non-zero session ID is provided, this AudioTrack will share effects attached to this
     * session
     * with all other media players or audio tracks in the same session, otherwise a new session
     * will be created for this track if none is supplied.
     * @param streamType the type of the audio stream. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
     * @param sampleRateInHz the initial source sample rate expressed in Hz.
     * @param channelConfig describes the configuration of the audio channels.
     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
     * @param audioFormat the format in which the audio data is represented.
     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
     *   {@link AudioFormat#ENCODING_PCM_8BIT}
     * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
     *   from for playback. If using the AudioTrack in streaming mode, you can write data into
     *   this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
     *   this is the maximum size of the sound that will be played for this instance.
     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
     *   smaller than getMinBufferSize() will result in an initialization failure.
     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
     * @param sessionId Id of audio session the AudioTrack must be attached to
     * @throws java.lang.IllegalArgumentException
     */
    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
            int bufferSizeInBytes, int mode, int sessionId)
    throws IllegalArgumentException {
        // mState already == STATE_UNINITIALIZED

        // remember which looper is associated with the AudioTrack instantiation;
        // threads without a looper fall back to the main looper. Native events are
        // dispatched on this looper unless a handler is supplied later.
        Looper looper;
        if ((looper = Looper.myLooper()) == null) {
            looper = Looper.getMainLooper();
        }
        mInitializationLooper = looper;

        // validates and stores stream type, sample rate, channel mask/count, encoding
        // and load mode into the corresponding fields; throws IllegalArgumentException
        audioParamCheck(streamType, sampleRateInHz, channelConfig, audioFormat, mode);

        // validates bufferSizeInBytes is a positive multiple of the frame size
        audioBuffSizeCheck(bufferSizeInBytes);

        IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
        mAppOps = IAppOpsService.Stub.asInterface(b);

        if (sessionId < 0) {
            throw new IllegalArgumentException("Invalid audio session ID: "+sessionId);
        }

        // in/out parameter: native_setup writes back the session id actually used
        int[] session = new int[1];
        session[0] = sessionId;
        // native initialization
        int initResult = native_setup(new WeakReference<AudioTrack>(this),
                mStreamType, mSampleRate, mChannels, mAudioFormat,
                mNativeBufferSizeInBytes, mDataLoadMode, session);
        if (initResult != SUCCESS) {
            loge("Error code "+initResult+" when initializing AudioTrack.");
            return; // with mState == STATE_UNINITIALIZED
        }

        mSessionId = session[0];

        // a static track is not usable until its audio data has been written to it
        if (mDataLoadMode == MODE_STATIC) {
            mState = STATE_NO_STATIC_DATA;
        } else {
            mState = STATE_INITIALIZED;
        }
    }
384
    // mask of all the channels supported by this implementation
    // (checked against caller-supplied masks in audioParamCheck()/getMinBufferSize())
    private static final int SUPPORTED_OUT_CHANNELS =
            AudioFormat.CHANNEL_OUT_FRONT_LEFT |
            AudioFormat.CHANNEL_OUT_FRONT_RIGHT |
            AudioFormat.CHANNEL_OUT_FRONT_CENTER |
            AudioFormat.CHANNEL_OUT_LOW_FREQUENCY |
            AudioFormat.CHANNEL_OUT_BACK_LEFT |
            AudioFormat.CHANNEL_OUT_BACK_RIGHT |
            AudioFormat.CHANNEL_OUT_BACK_CENTER;
394
395    // Convenience method for the constructor's parameter checks.
396    // This is where constructor IllegalArgumentException-s are thrown
397    // postconditions:
398    //    mStreamType is valid
399    //    mChannelCount is valid
400    //    mChannels is valid
401    //    mAudioFormat is valid
402    //    mSampleRate is valid
403    //    mDataLoadMode is valid
404    private void audioParamCheck(int streamType, int sampleRateInHz,
405                                 int channelConfig, int audioFormat, int mode) {
406
407        //--------------
408        // stream type
409        if( (streamType != AudioManager.STREAM_ALARM) && (streamType != AudioManager.STREAM_MUSIC)
410           && (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM)
411           && (streamType != AudioManager.STREAM_VOICE_CALL)
412           && (streamType != AudioManager.STREAM_NOTIFICATION)
413           && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)
414           && (streamType != AudioManager.STREAM_DTMF)) {
415            throw new IllegalArgumentException("Invalid stream type.");
416        }
417        mStreamType = streamType;
418
419        //--------------
420        // sample rate, note these values are subject to change
421        if ( (sampleRateInHz < 4000) || (sampleRateInHz > 48000) ) {
422            throw new IllegalArgumentException(sampleRateInHz
423                    + "Hz is not a supported sample rate.");
424        }
425        mSampleRate = sampleRateInHz;
426
427        //--------------
428        // channel config
429        mChannelConfiguration = channelConfig;
430
431        switch (channelConfig) {
432        case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
433        case AudioFormat.CHANNEL_OUT_MONO:
434        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
435            mChannelCount = 1;
436            mChannels = AudioFormat.CHANNEL_OUT_MONO;
437            break;
438        case AudioFormat.CHANNEL_OUT_STEREO:
439        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
440            mChannelCount = 2;
441            mChannels = AudioFormat.CHANNEL_OUT_STEREO;
442            break;
443        default:
444            if (!isMultichannelConfigSupported(channelConfig)) {
445                // input channel configuration features unsupported channels
446                throw new IllegalArgumentException("Unsupported channel configuration.");
447            }
448            mChannels = channelConfig;
449            mChannelCount = Integer.bitCount(channelConfig);
450        }
451
452        //--------------
453        // audio format
454        switch (audioFormat) {
455        case AudioFormat.ENCODING_DEFAULT:
456            mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
457            break;
458        case AudioFormat.ENCODING_PCM_16BIT:
459        case AudioFormat.ENCODING_PCM_8BIT:
460            mAudioFormat = audioFormat;
461            break;
462        default:
463            throw new IllegalArgumentException("Unsupported sample encoding."
464                + " Should be ENCODING_PCM_8BIT or ENCODING_PCM_16BIT.");
465        }
466
467        //--------------
468        // audio load mode
469        if ( (mode != MODE_STREAM) && (mode != MODE_STATIC) ) {
470            throw new IllegalArgumentException("Invalid mode.");
471        }
472        mDataLoadMode = mode;
473    }
474
475    /**
476     * Convenience method to check that the channel configuration (a.k.a channel mask) is supported
477     * @param channelConfig the mask to validate
478     * @return false if the AudioTrack can't be used with such a mask
479     */
480    private static boolean isMultichannelConfigSupported(int channelConfig) {
481        // check for unsupported channels
482        if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
483            loge("Channel configuration features unsupported channels");
484            return false;
485        }
486        // check for unsupported multichannel combinations:
487        // - FL/FR must be present
488        // - L/R channels must be paired (e.g. no single L channel)
489        final int frontPair =
490                AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
491        if ((channelConfig & frontPair) != frontPair) {
492                loge("Front channels must be present in multichannel configurations");
493                return false;
494        }
495        final int backPair =
496                AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT;
497        if ((channelConfig & backPair) != 0) {
498            if ((channelConfig & backPair) != backPair) {
499                loge("Rear channels can't be used independently");
500                return false;
501            }
502        }
503        return true;
504    }
505
506
507    // Convenience method for the constructor's audio buffer size check.
508    // preconditions:
509    //    mChannelCount is valid
510    //    mAudioFormat is valid
511    // postcondition:
512    //    mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
513    private void audioBuffSizeCheck(int audioBufferSize) {
514        // NB: this section is only valid with PCM data.
515        //     To update when supporting compressed formats
516        int frameSizeInBytes = mChannelCount
517                * (mAudioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
518        if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
519            throw new IllegalArgumentException("Invalid audio buffer size.");
520        }
521
522        mNativeBufferSizeInBytes = audioBufferSize;
523        mNativeBufferSizeInFrames = audioBufferSize / frameSizeInBytes;
524    }
525
526
    /**
     * Releases the native AudioTrack resources.
     * After this call the track is back in {@link #STATE_UNINITIALIZED} and must not be reused.
     */
    public void release() {
        // even though native_release() stops the native AudioTrack, we need to stop
        // AudioTrack subclasses too.
        try {
            stop();
        } catch(IllegalStateException ise) {
            // don't raise an exception, we're releasing the resources.
        }
        native_release();
        mState = STATE_UNINITIALIZED;
    }
541
542    @Override
543    protected void finalize() {
544        native_finalize();
545    }
546
547    //--------------------------------------------------------------------------
548    // Getters
549    //--------------------
    /**
     * Returns the minimum valid volume value. Volume values set under this one will
     * be clamped at this value.
     * @return the minimum volume expressed as a linear attenuation.
     */
    static public float getMinVolume() {
        return VOLUME_MIN;
    }

    /**
     * Returns the maximum valid volume value. Volume values set above this one will
     * be clamped at this value.
     * @return the maximum volume expressed as a linear attenuation.
     */
    static public float getMaxVolume() {
        return VOLUME_MAX;
    }

    /**
     * Returns the configured audio data sample rate in Hz
     * @return the source sample rate this track was constructed with.
     */
    public int getSampleRate() {
        return mSampleRate;
    }

    /**
     * Returns the current playback rate in Hz.
     * @return the playback rate as reported by the native layer.
     */
    public int getPlaybackRate() {
        return native_get_playback_rate();
    }

    /**
     * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
     * and {@link AudioFormat#ENCODING_PCM_8BIT}.
     */
    public int getAudioFormat() {
        return mAudioFormat;
    }

    /**
     * Returns the type of audio stream this AudioTrack is configured for.
     * Compare the result against {@link AudioManager#STREAM_VOICE_CALL},
     * {@link AudioManager#STREAM_SYSTEM}, {@link AudioManager#STREAM_RING},
     * {@link AudioManager#STREAM_MUSIC}, {@link AudioManager#STREAM_ALARM},
     * {@link AudioManager#STREAM_NOTIFICATION}, or {@link AudioManager#STREAM_DTMF}.
     */
    public int getStreamType() {
        return mStreamType;
    }

    /**
     * Returns the configured channel configuration.
     * See {@link AudioFormat#CHANNEL_OUT_MONO}
     * and {@link AudioFormat#CHANNEL_OUT_STEREO}.
     */
    public int getChannelConfiguration() {
        // this is the mask as passed by the caller, not the normalized mask
        return mChannelConfiguration;
    }

    /**
     * Returns the configured number of channels.
     */
    public int getChannelCount() {
        return mChannelCount;
    }

    /**
     * Returns the state of the AudioTrack instance. This is useful after the
     * AudioTrack instance has been created to check if it was initialized
     * properly. This ensures that the appropriate resources have been acquired.
     * @see #STATE_INITIALIZED
     * @see #STATE_NO_STATIC_DATA
     * @see #STATE_UNINITIALIZED
     */
    public int getState() {
        return mState;
    }

    /**
     * Returns the playback state of the AudioTrack instance.
     * @see #PLAYSTATE_STOPPED
     * @see #PLAYSTATE_PAUSED
     * @see #PLAYSTATE_PLAYING
     */
    public int getPlayState() {
        // read under lock so a concurrent state transition is never half-observed
        synchronized (mPlayStateLock) {
            return mPlayState;
        }
    }
640
    /**
     *  Returns the "native frame count", derived from the bufferSizeInBytes specified at
     *  creation time and converted to frame units.
     *  If track's creation mode is {@link #MODE_STATIC},
     *  it is equal to the specified bufferSizeInBytes converted to frame units.
     *  If track's creation mode is {@link #MODE_STREAM},
     *  it is typically greater than or equal to the specified bufferSizeInBytes converted to frame
     *  units; it may be rounded up to a larger value if needed by the target device implementation.
     *  @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
     *  See {@link AudioManager#getProperty(String)} for key
     *  {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
     */
    @Deprecated
    protected int getNativeFrameCount() {
        return native_get_native_frame_count();
    }

    /**
     * Returns marker position expressed in frames.
     * @return marker position in wrapping frame units similar to {@link #getPlaybackHeadPosition},
     * or zero if marker is disabled.
     */
    public int getNotificationMarkerPosition() {
        return native_get_marker_pos();
    }

    /**
     * Returns the notification update period expressed in frames.
     * Zero means that no position update notifications are being delivered.
     */
    public int getPositionNotificationPeriod() {
        return native_get_pos_update_period();
    }

    /**
     * Returns the playback head position expressed in frames.
     * Though the "int" type is signed 32-bits, the value should be reinterpreted as if it is
     * unsigned 32-bits.  That is, the next position after 0x7FFFFFFF is (int) 0x80000000.
     * This is a continuously advancing counter.  It will wrap (overflow) periodically,
     * for example approximately once every 27:03:11 hours:minutes:seconds at 44.1 kHz.
     * It is reset to zero by flush(), reload(), and stop().
     */
    public int getPlaybackHeadPosition() {
        return native_get_position();
    }

    /**
     * Returns this track's estimated latency in milliseconds. This includes the latency due
     * to AudioTrack buffer size, AudioMixer (if any) and audio hardware driver.
     *
     * DO NOT UNHIDE. The existing approach for doing A/V sync has too many problems. We need
     * a better solution.
     * @hide
     */
    public int getLatency() {
        return native_get_latency();
    }

    /**
     *  Returns the output sample rate in Hz for the specified stream type.
     *  @param streamType one of the AudioManager.STREAM_* constants.
     */
    static public int getNativeOutputSampleRate(int streamType) {
        return native_get_output_sample_rate(streamType);
    }
705
706    /**
707     * Returns the minimum buffer size required for the successful creation of an AudioTrack
708     * object to be created in the {@link #MODE_STREAM} mode. Note that this size doesn't
709     * guarantee a smooth playback under load, and higher values should be chosen according to
710     * the expected frequency at which the buffer will be refilled with additional data to play.
711     * For example, if you intend to dynamically set the source sample rate of an AudioTrack
712     * to a higher value than the initial source sample rate, be sure to configure the buffer size
713     * based on the highest planned sample rate.
714     * @param sampleRateInHz the source sample rate expressed in Hz.
715     * @param channelConfig describes the configuration of the audio channels.
716     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
717     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
718     * @param audioFormat the format in which the audio data is represented.
719     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
720     *   {@link AudioFormat#ENCODING_PCM_8BIT}
721     * @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed,
722     *   or {@link #ERROR} if unable to query for output properties,
723     *   or the minimum buffer size expressed in bytes.
724     */
725    static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
726        int channelCount = 0;
727        switch(channelConfig) {
728        case AudioFormat.CHANNEL_OUT_MONO:
729        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
730            channelCount = 1;
731            break;
732        case AudioFormat.CHANNEL_OUT_STEREO:
733        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
734            channelCount = 2;
735            break;
736        default:
737            if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
738                // input channel configuration features unsupported channels
739                loge("getMinBufferSize(): Invalid channel configuration.");
740                return ERROR_BAD_VALUE;
741            } else {
742                channelCount = Integer.bitCount(channelConfig);
743            }
744        }
745
746        if ((audioFormat != AudioFormat.ENCODING_PCM_16BIT)
747            && (audioFormat != AudioFormat.ENCODING_PCM_8BIT)) {
748            loge("getMinBufferSize(): Invalid audio format.");
749            return ERROR_BAD_VALUE;
750        }
751
752        // sample rate, note these values are subject to change
753        if ( (sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX) ) {
754            loge("getMinBufferSize(): " + sampleRateInHz + " Hz is not a supported sample rate.");
755            return ERROR_BAD_VALUE;
756        }
757
758        int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
759        if (size <= 0) {
760            loge("getMinBufferSize(): error querying hardware");
761            return ERROR;
762        }
763        else {
764            return size;
765        }
766    }
767
768    /**
769     * Returns the audio session ID.
770     *
771     * @return the ID of the audio session this AudioTrack belongs to.
772     */
773    public int getAudioSessionId() {
774        return mSessionId;
775    }
776
777   /**
778    * Poll for a timestamp on demand.
779    *
780    * Use if you need to get the most recent timestamp outside of the event callback handler.
781    * Calling this method too often may be inefficient;
782    * if you need a high-resolution mapping between frame position and presentation time,
783    * consider implementing that at application level, based on low-resolution timestamps.
784    * The audio data at the returned position may either already have been
785    * presented, or may have not yet been presented but is committed to be presented.
786    * It is not possible to request the time corresponding to a particular position,
787    * or to request the (fractional) position corresponding to a particular time.
788    * If you need such features, consider implementing them at application level.
789    *
790    * @param timestamp a reference to a non-null AudioTimestamp instance allocated
791    *        and owned by caller.
792    * @return true if a timestamp is available, or false if no timestamp is available.
793    *         If a timestamp if available,
794    *         the AudioTimestamp instance is filled in with a position in frame units, together
795    *         with the estimated time when that frame was presented or is committed to
796    *         be presented.
797    *         In the case that no timestamp is available, any supplied instance is left unaltered.
798    */
799    public boolean getTimestamp(AudioTimestamp timestamp)
800    {
801        if (timestamp == null) {
802            throw new IllegalArgumentException();
803        }
804        // It's unfortunate, but we have to either create garbage every time or use synchronized
805        long[] longArray = new long[2];
806        int ret = native_get_timestamp(longArray);
807        if (ret != SUCCESS) {
808            return false;
809        }
810        timestamp.framePosition = longArray[0];
811        timestamp.nanoTime = longArray[1];
812        return true;
813    }
814
815
816    //--------------------------------------------------------------------------
817    // Initialization / configuration
818    //--------------------
819    /**
820     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
821     * for each periodic playback head position update.
822     * Notifications will be received in the same thread as the one in which the AudioTrack
823     * instance was created.
824     * @param listener
825     */
826    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) {
827        setPlaybackPositionUpdateListener(listener, null);
828    }
829
830    /**
831     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
832     * for each periodic playback head position update.
833     * Use this method to receive AudioTrack events in the Handler associated with another
834     * thread than the one in which you created the AudioTrack instance.
835     * @param listener
836     * @param handler the Handler that will receive the event notification messages.
837     */
838    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
839                                                    Handler handler) {
840        if (listener != null) {
841            mEventHandlerDelegate = new NativeEventHandlerDelegate(this, listener, handler);
842        } else {
843            mEventHandlerDelegate = null;
844        }
845    }
846
847
848
849     /**
850     * Sets the specified left/right output volume values on the AudioTrack. Values are clamped
851     * to the ({@link #getMinVolume()}, {@link #getMaxVolume()}) interval if outside this range.
852     * @param leftVolume output attenuation for the left channel. A value of 0.0f is silence,
853     *      a value of 1.0f is no attenuation.
854     * @param rightVolume output attenuation for the right channel
855     * @return error code or success, see {@link #SUCCESS},
856     *    {@link #ERROR_INVALID_OPERATION}
857     */
858    public int setStereoVolume(float leftVolume, float rightVolume) {
859        if (isRestricted()) {
860            return SUCCESS;
861        }
862        if (mState == STATE_UNINITIALIZED) {
863            return ERROR_INVALID_OPERATION;
864        }
865
866        // clamp the volumes
867        if (leftVolume < getMinVolume()) {
868            leftVolume = getMinVolume();
869        }
870        if (leftVolume > getMaxVolume()) {
871            leftVolume = getMaxVolume();
872        }
873        if (rightVolume < getMinVolume()) {
874            rightVolume = getMinVolume();
875        }
876        if (rightVolume > getMaxVolume()) {
877            rightVolume = getMaxVolume();
878        }
879
880        native_setVolume(leftVolume, rightVolume);
881
882        return SUCCESS;
883    }
884
885
886    /**
887     * Sets the specified output volume values on all channels of this track.  The value is clamped
888     * to the ({@link #getMinVolume()}, {@link #getMaxVolume()}) interval if outside this range.
889     * @param volume output attenuation for all channels. A value of 0.0f is silence,
890     *      a value of 1.0f is no attenuation.
891     * @return error code or success, see {@link #SUCCESS},
892     *    {@link #ERROR_INVALID_OPERATION}
893     */
894    public int setVolume(float volume) {
895        return setStereoVolume(volume, volume);
896    }
897
898
899    /**
900     * Sets the playback sample rate for this track. This sets the sampling rate at which
901     * the audio data will be consumed and played back
902     * (as set by the sampleRateInHz parameter in the
903     * {@link #AudioTrack(int, int, int, int, int, int)} constructor),
904     * not the original sampling rate of the
905     * content. For example, setting it to half the sample rate of the content will cause the
906     * playback to last twice as long, but will also result in a pitch shift down by one octave.
907     * The valid sample rate range is from 1 Hz to twice the value returned by
908     * {@link #getNativeOutputSampleRate(int)}.
909     * @param sampleRateInHz the sample rate expressed in Hz
910     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
911     *    {@link #ERROR_INVALID_OPERATION}
912     */
913    public int setPlaybackRate(int sampleRateInHz) {
914        if (mState != STATE_INITIALIZED) {
915            return ERROR_INVALID_OPERATION;
916        }
917        if (sampleRateInHz <= 0) {
918            return ERROR_BAD_VALUE;
919        }
920        return native_set_playback_rate(sampleRateInHz);
921    }
922
923
924    /**
925     * Sets the position of the notification marker.  At most one marker can be active.
926     * @param markerInFrames marker position in wrapping frame units similar to
927     * {@link #getPlaybackHeadPosition}, or zero to disable the marker.
928     * To set a marker at a position which would appear as zero due to wraparound,
929     * a workaround is to use a non-zero position near zero, such as -1 or 1.
930     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
931     *  {@link #ERROR_INVALID_OPERATION}
932     */
933    public int setNotificationMarkerPosition(int markerInFrames) {
934        if (mState == STATE_UNINITIALIZED) {
935            return ERROR_INVALID_OPERATION;
936        }
937        return native_set_marker_pos(markerInFrames);
938    }
939
940
941    /**
942     * Sets the period for the periodic notification event.
943     * @param periodInFrames update period expressed in frames
944     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
945     */
946    public int setPositionNotificationPeriod(int periodInFrames) {
947        if (mState == STATE_UNINITIALIZED) {
948            return ERROR_INVALID_OPERATION;
949        }
950        return native_set_pos_update_period(periodInFrames);
951    }
952
953
954    /**
955     * Sets the playback head position.
956     * The track must be stopped or paused for the position to be changed,
957     * and must use the {@link #MODE_STATIC} mode.
958     * @param positionInFrames playback head position expressed in frames
959     * Zero corresponds to start of buffer.
960     * The position must not be greater than the buffer size in frames, or negative.
961     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
962     *    {@link #ERROR_INVALID_OPERATION}
963     */
964    public int setPlaybackHeadPosition(int positionInFrames) {
965        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
966                getPlayState() == PLAYSTATE_PLAYING) {
967            return ERROR_INVALID_OPERATION;
968        }
969        if (!(0 <= positionInFrames && positionInFrames <= mNativeBufferSizeInFrames)) {
970            return ERROR_BAD_VALUE;
971        }
972        return native_set_position(positionInFrames);
973    }
974
975    /**
976     * Sets the loop points and the loop count. The loop can be infinite.
977     * Similarly to setPlaybackHeadPosition,
978     * the track must be stopped or paused for the loop points to be changed,
979     * and must use the {@link #MODE_STATIC} mode.
980     * @param startInFrames loop start marker expressed in frames
981     * Zero corresponds to start of buffer.
982     * The start marker must not be greater than or equal to the buffer size in frames, or negative.
983     * @param endInFrames loop end marker expressed in frames
984     * The total buffer size in frames corresponds to end of buffer.
985     * The end marker must not be greater than the buffer size in frames.
986     * For looping, the end marker must not be less than or equal to the start marker,
987     * but to disable looping
988     * it is permitted for start marker, end marker, and loop count to all be 0.
989     * @param loopCount the number of times the loop is looped.
990     *    A value of -1 means infinite looping, and 0 disables looping.
991     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
992     *    {@link #ERROR_INVALID_OPERATION}
993     */
994    public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
995        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
996                getPlayState() == PLAYSTATE_PLAYING) {
997            return ERROR_INVALID_OPERATION;
998        }
999        if (loopCount == 0) {
1000            ;   // explicitly allowed as an exception to the loop region range check
1001        } else if (!(0 <= startInFrames && startInFrames < mNativeBufferSizeInFrames &&
1002                startInFrames < endInFrames && endInFrames <= mNativeBufferSizeInFrames)) {
1003            return ERROR_BAD_VALUE;
1004        }
1005        return native_set_loop(startInFrames, endInFrames, loopCount);
1006    }
1007
1008    /**
1009     * Sets the initialization state of the instance. This method was originally intended to be used
1010     * in an AudioTrack subclass constructor to set a subclass-specific post-initialization state.
1011     * However, subclasses of AudioTrack are no longer recommended, so this method is obsolete.
1012     * @param state the state of the AudioTrack instance
1013     * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
1014     */
1015    @Deprecated
1016    protected void setState(int state) {
1017        mState = state;
1018    }
1019
1020
1021    //---------------------------------------------------------
1022    // Transport control methods
1023    //--------------------
1024    /**
1025     * Starts playing an AudioTrack.
1026     * If track's creation mode is {@link #MODE_STATIC}, you must have called write() prior.
1027     *
1028     * @throws IllegalStateException
1029     */
1030    public void play()
1031    throws IllegalStateException {
1032        if (mState != STATE_INITIALIZED) {
1033            throw new IllegalStateException("play() called on uninitialized AudioTrack.");
1034        }
1035        if (isRestricted()) {
1036            setVolume(0);
1037        }
1038        synchronized(mPlayStateLock) {
1039            native_start();
1040            mPlayState = PLAYSTATE_PLAYING;
1041        }
1042    }
1043
1044    private boolean isRestricted() {
1045        try {
1046            final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO, mStreamType,
1047                    Process.myUid(), ActivityThread.currentPackageName());
1048            return mode != AppOpsManager.MODE_ALLOWED;
1049        } catch (RemoteException e) {
1050            return false;
1051        }
1052    }
1053
1054    /**
1055     * Stops playing the audio data.
1056     * When used on an instance created in {@link #MODE_STREAM} mode, audio will stop playing
1057     * after the last buffer that was written has been played. For an immediate stop, use
1058     * {@link #pause()}, followed by {@link #flush()} to discard audio data that hasn't been played
1059     * back yet.
1060     * @throws IllegalStateException
1061     */
1062    public void stop()
1063    throws IllegalStateException {
1064        if (mState != STATE_INITIALIZED) {
1065            throw new IllegalStateException("stop() called on uninitialized AudioTrack.");
1066        }
1067
1068        // stop playing
1069        synchronized(mPlayStateLock) {
1070            native_stop();
1071            mPlayState = PLAYSTATE_STOPPED;
1072        }
1073    }
1074
1075    /**
1076     * Pauses the playback of the audio data. Data that has not been played
1077     * back will not be discarded. Subsequent calls to {@link #play} will play
1078     * this data back. See {@link #flush()} to discard this data.
1079     *
1080     * @throws IllegalStateException
1081     */
1082    public void pause()
1083    throws IllegalStateException {
1084        if (mState != STATE_INITIALIZED) {
1085            throw new IllegalStateException("pause() called on uninitialized AudioTrack.");
1086        }
1087        //logd("pause()");
1088
1089        // pause playback
1090        synchronized(mPlayStateLock) {
1091            native_pause();
1092            mPlayState = PLAYSTATE_PAUSED;
1093        }
1094    }
1095
1096
1097    //---------------------------------------------------------
1098    // Audio data supply
1099    //--------------------
1100
1101    /**
1102     * Flushes the audio data currently queued for playback. Any data that has
1103     * not been played back will be discarded.  No-op if not stopped or paused,
1104     * or if the track's creation mode is not {@link #MODE_STREAM}.
1105     */
1106    public void flush() {
1107        if (mState == STATE_INITIALIZED) {
1108            // flush the data in native layer
1109            native_flush();
1110        }
1111
1112    }
1113
1114    /**
1115     * Writes the audio data to the audio sink for playback (streaming mode),
1116     * or copies audio data for later playback (static buffer mode).
1117     * In streaming mode, will block until all data has been written to the audio sink.
1118     * In static buffer mode, copies the data to the buffer starting at offset 0.
1119     * Note that the actual playback of this data might occur after this function
1120     * returns. This function is thread safe with respect to {@link #stop} calls,
1121     * in which case all of the specified data might not be written to the audio sink.
1122     *
1123     * @param audioData the array that holds the data to play.
1124     * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
1125     *    starts.
1126     * @param sizeInBytes the number of bytes to read in audioData after the offset.
1127     * @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
1128     *    if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1129     *    the parameters don't resolve to valid data and indexes.
1130     */
1131
1132    public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) {
1133
1134        if (mState == STATE_UNINITIALIZED) {
1135            return ERROR_INVALID_OPERATION;
1136        }
1137
1138        if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
1139                || (offsetInBytes + sizeInBytes < 0)    // detect integer overflow
1140                || (offsetInBytes + sizeInBytes > audioData.length)) {
1141            return ERROR_BAD_VALUE;
1142        }
1143
1144        int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat,
1145                true /*isBlocking*/);
1146
1147        if ((mDataLoadMode == MODE_STATIC)
1148                && (mState == STATE_NO_STATIC_DATA)
1149                && (ret > 0)) {
1150            // benign race with respect to other APIs that read mState
1151            mState = STATE_INITIALIZED;
1152        }
1153
1154        return ret;
1155    }
1156
1157
1158    /**
1159     * Writes the audio data to the audio sink for playback (streaming mode),
1160     * or copies audio data for later playback (static buffer mode).
1161     * In streaming mode, will block until all data has been written to the audio sink.
1162     * In static buffer mode, copies the data to the buffer starting at offset 0.
1163     * Note that the actual playback of this data might occur after this function
1164     * returns. This function is thread safe with respect to {@link #stop} calls,
1165     * in which case all of the specified data might not be written to the audio sink.
1166     *
1167     * @param audioData the array that holds the data to play.
1168     * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
1169     *     starts.
1170     * @param sizeInShorts the number of shorts to read in audioData after the offset.
1171     * @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
1172      *    if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1173      *    the parameters don't resolve to valid data and indexes.
1174     */
1175
1176    public int write(short[] audioData, int offsetInShorts, int sizeInShorts) {
1177
1178        if (mState == STATE_UNINITIALIZED) {
1179            return ERROR_INVALID_OPERATION;
1180        }
1181
1182        if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
1183                || (offsetInShorts + sizeInShorts < 0)  // detect integer overflow
1184                || (offsetInShorts + sizeInShorts > audioData.length)) {
1185            return ERROR_BAD_VALUE;
1186        }
1187
1188        int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat);
1189
1190        if ((mDataLoadMode == MODE_STATIC)
1191                && (mState == STATE_NO_STATIC_DATA)
1192                && (ret > 0)) {
1193            // benign race with respect to other APIs that read mState
1194            mState = STATE_INITIALIZED;
1195        }
1196
1197        return ret;
1198    }
1199
1200
1201    /**
1202     * @hide CANDIDATE FOR PUBLIC API
1203     * Writes the audio data to the audio sink for playback (streaming mode),
1204     * or copies audio data for later playback (static buffer mode).
1205     * In static buffer mode, copies the data to the buffer starting at its 0 offset, and the write
1206     * mode is ignored.
1207     * In streaming mode, the blocking behavior will depend on the write mode.
1208     * @param audioData the buffer that holds the data to play, starting at the position reported
1209     *     by <code>audioData.position()</code>.
1210     *     <BR>Note that this method will not update the position in this buffer, therefore when
1211     *     writing a loop to write all the data in the buffer, you should increment the
1212     *     <code>offsetInBytes</code> parameter at each pass by the amount that was previously
1213     *     written for this buffer.
1214     * @param offsetInBytes offset to read from in bytes (note this differs from
1215     *     <code>audioData.position()</code>).
1216     * @param sizeInBytes number of bytes to read (note this differs from
1217     *     <code>audioData.remaining()</code>).
1218     * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
1219     *     effect in static mode.
1220     *     <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
1221     *         to the audio sink.
1222     *     <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
1223     *     queuing as much audio data for playback as possible without blocking.
1224     * @return 0 or a positive number of bytes that were written, or
1225     *     {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}
1226     */
1227    public int write(ByteBuffer audioData, int offsetInBytes, int sizeInBytes,
1228            @WriteMode int writeMode) {
1229
1230        if (mState == STATE_UNINITIALIZED) {
1231            Log.e(TAG, "AudioTrack.write() called in invalid state STATE_UNINITIALIZED");
1232            return ERROR_INVALID_OPERATION;
1233        }
1234
1235        if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
1236            Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
1237            return ERROR_BAD_VALUE;
1238        }
1239
1240        if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
1241                || (offsetInBytes + sizeInBytes < 0)    // detect integer overflow
1242                || (offsetInBytes + sizeInBytes > audioData.remaining())) {
1243            Log.e(TAG, "AudioTrack.write() called with invalid size/offset values");
1244            return ERROR_BAD_VALUE;
1245        }
1246
1247        int ret = 0;
1248        if (audioData.isDirect()) {
1249            ret = native_write_native_bytes(audioData,
1250                    audioData.position(),
1251                    offsetInBytes, sizeInBytes, mAudioFormat,
1252                    writeMode == WRITE_BLOCKING);
1253        } else {
1254            ret = native_write_byte(NioUtils.unsafeArray(audioData),
1255                    NioUtils.unsafeArrayOffset(audioData) + audioData.position() + offsetInBytes,
1256                    sizeInBytes, mAudioFormat,
1257                    writeMode == WRITE_BLOCKING);
1258        }
1259
1260        if ((mDataLoadMode == MODE_STATIC)
1261                && (mState == STATE_NO_STATIC_DATA)
1262                && (ret > 0)) {
1263            // benign race with respect to other APIs that read mState
1264            mState = STATE_INITIALIZED;
1265        }
1266
1267        return ret;
1268    }
1269
1270    /**
1271     * Notifies the native resource to reuse the audio data already loaded in the native
1272     * layer, that is to rewind to start of buffer.
1273     * The track's creation mode must be {@link #MODE_STATIC}.
1274     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1275     *  {@link #ERROR_INVALID_OPERATION}
1276     */
1277    public int reloadStaticData() {
1278        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED) {
1279            return ERROR_INVALID_OPERATION;
1280        }
1281        return native_reload_static();
1282    }
1283
1284    //--------------------------------------------------------------------------
1285    // Audio effects management
1286    //--------------------
1287
1288    /**
1289     * Attaches an auxiliary effect to the audio track. A typical auxiliary
1290     * effect is a reverberation effect which can be applied on any sound source
1291     * that directs a certain amount of its energy to this effect. This amount
1292     * is defined by setAuxEffectSendLevel().
1293     * {@see #setAuxEffectSendLevel(float)}.
1294     * <p>After creating an auxiliary effect (e.g.
1295     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
1296     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling
1297     * this method to attach the audio track to the effect.
1298     * <p>To detach the effect from the audio track, call this method with a
1299     * null effect id.
1300     *
1301     * @param effectId system wide unique id of the effect to attach
1302     * @return error code or success, see {@link #SUCCESS},
1303     *    {@link #ERROR_INVALID_OPERATION}, {@link #ERROR_BAD_VALUE}
1304     */
1305    public int attachAuxEffect(int effectId) {
1306        if (mState == STATE_UNINITIALIZED) {
1307            return ERROR_INVALID_OPERATION;
1308        }
1309        return native_attachAuxEffect(effectId);
1310    }
1311
1312    /**
1313     * Sets the send level of the audio track to the attached auxiliary effect
1314     * {@link #attachAuxEffect(int)}.  The level value range is 0.0f to 1.0f.
1315     * Values are clamped to the (0.0f, 1.0f) interval if outside this range.
1316     * <p>By default the send level is 0.0f, so even if an effect is attached to the player
1317     * this method must be called for the effect to be applied.
1318     * <p>Note that the passed level value is a raw scalar. UI controls should be scaled
1319     * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
1320     * so an appropriate conversion from linear UI input x to level is:
1321     * x == 0 -&gt; level = 0
1322     * 0 &lt; x &lt;= R -&gt; level = 10^(72*(x-R)/20/R)
1323     *
1324     * @param level send level scalar
1325     * @return error code or success, see {@link #SUCCESS},
1326     *    {@link #ERROR_INVALID_OPERATION}
1327     */
1328    public int setAuxEffectSendLevel(float level) {
1329        if (isRestricted()) {
1330            return SUCCESS;
1331        }
1332        if (mState == STATE_UNINITIALIZED) {
1333            return ERROR_INVALID_OPERATION;
1334        }
1335        // clamp the level
1336        if (level < getMinVolume()) {
1337            level = getMinVolume();
1338        }
1339        if (level > getMaxVolume()) {
1340            level = getMaxVolume();
1341        }
1342        native_setAuxEffectSendLevel(level);
1343        return SUCCESS;
1344    }
1345
1346    //---------------------------------------------------------
1347    // Interface definitions
1348    //--------------------
1349    /**
1350     * Interface definition for a callback to be invoked when the playback head position of
1351     * an AudioTrack has reached a notification marker or has increased by a certain period.
1352     */
1353    public interface OnPlaybackPositionUpdateListener  {
1354        /**
1355         * Called on the listener to notify it that the previously set marker has been reached
1356         * by the playback head.
1357         */
1358        void onMarkerReached(AudioTrack track);
1359
1360        /**
1361         * Called on the listener to periodically notify it that the playback head has reached
1362         * a multiple of the notification period.
1363         */
1364        void onPeriodicNotification(AudioTrack track);
1365    }
1366
1367
1368    //---------------------------------------------------------
1369    // Inner classes
1370    //--------------------
1371    /**
1372     * Helper class to handle the forwarding of native events to the appropriate listener
1373     * (potentially) handled in a different thread
1374     */
1375    private class NativeEventHandlerDelegate {
1376        private final Handler mHandler;
1377
1378        NativeEventHandlerDelegate(final AudioTrack track,
1379                                   final OnPlaybackPositionUpdateListener listener,
1380                                   Handler handler) {
1381            // find the looper for our new event handler
1382            Looper looper;
1383            if (handler != null) {
1384                looper = handler.getLooper();
1385            } else {
1386                // no given handler, use the looper the AudioTrack was created in
1387                looper = mInitializationLooper;
1388            }
1389
1390            // construct the event handler with this looper
1391            if (looper != null) {
1392                // implement the event handler delegate
1393                mHandler = new Handler(looper) {
1394                    @Override
1395                    public void handleMessage(Message msg) {
1396                        if (track == null) {
1397                            return;
1398                        }
1399                        switch(msg.what) {
1400                        case NATIVE_EVENT_MARKER:
1401                            if (listener != null) {
1402                                listener.onMarkerReached(track);
1403                            }
1404                            break;
1405                        case NATIVE_EVENT_NEW_POS:
1406                            if (listener != null) {
1407                                listener.onPeriodicNotification(track);
1408                            }
1409                            break;
1410                        default:
1411                            loge("Unknown native event type: " + msg.what);
1412                            break;
1413                        }
1414                    }
1415                };
1416            } else {
1417                mHandler = null;
1418            }
1419        }
1420
1421        Handler getHandler() {
1422            return mHandler;
1423        }
1424    }
1425
1426
1427    //---------------------------------------------------------
1428    // Java methods called from the native side
1429    //--------------------
1430    @SuppressWarnings("unused")
1431    private static void postEventFromNative(Object audiotrack_ref,
1432            int what, int arg1, int arg2, Object obj) {
1433        //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
1434        AudioTrack track = (AudioTrack)((WeakReference)audiotrack_ref).get();
1435        if (track == null) {
1436            return;
1437        }
1438
1439        NativeEventHandlerDelegate delegate = track.mEventHandlerDelegate;
1440        if (delegate != null) {
1441            Handler handler = delegate.getHandler();
1442            if (handler != null) {
1443                Message m = handler.obtainMessage(what, arg1, arg2, obj);
1444                handler.sendMessage(m);
1445            }
1446        }
1447
1448    }
1449
1450
1451    //---------------------------------------------------------
1452    // Native methods called from the Java side
1453    //--------------------
1454
1455    private native final int native_setup(Object audiotrack_this,
1456            int streamType, int sampleRate, int channelMask, int audioFormat,
1457            int buffSizeInBytes, int mode, int[] sessionId);
1458
1459    private native final void native_finalize();
1460
1461    private native final void native_release();
1462
1463    private native final void native_start();
1464
1465    private native final void native_stop();
1466
1467    private native final void native_pause();
1468
1469    private native final void native_flush();
1470
1471    private native final int native_write_byte(byte[] audioData,
1472                                               int offsetInBytes, int sizeInBytes, int format,
1473                                               boolean isBlocking);
1474
1475    private native final int native_write_short(short[] audioData,
1476                                                int offsetInShorts, int sizeInShorts, int format);
1477
1478    private native final int native_write_native_bytes(Object audioData,
1479            int positionInBytes, int offsetInBytes, int sizeInBytes, int format, boolean blocking);
1480
1481    private native final int native_reload_static();
1482
1483    private native final int native_get_native_frame_count();
1484
1485    private native final void native_setVolume(float leftVolume, float rightVolume);
1486
1487    private native final int native_set_playback_rate(int sampleRateInHz);
1488    private native final int native_get_playback_rate();
1489
1490    private native final int native_set_marker_pos(int marker);
1491    private native final int native_get_marker_pos();
1492
1493    private native final int native_set_pos_update_period(int updatePeriod);
1494    private native final int native_get_pos_update_period();
1495
1496    private native final int native_set_position(int position);
1497    private native final int native_get_position();
1498
1499    private native final int native_get_latency();
1500
1501    // longArray must be a non-null array of length >= 2
1502    // [0] is assigned the frame position
1503    // [1] is assigned the time in CLOCK_MONOTONIC nanoseconds
1504    private native final int native_get_timestamp(long[] longArray);
1505
1506    private native final int native_set_loop(int start, int end, int loopCount);
1507
1508    static private native final int native_get_output_sample_rate(int streamType);
1509    static private native final int native_get_min_buff_size(
1510            int sampleRateInHz, int channelConfig, int audioFormat);
1511
1512    private native final int native_attachAuxEffect(int effectId);
1513    private native final void native_setAuxEffectSendLevel(float level);
1514
1515    //---------------------------------------------------------
1516    // Utility methods
1517    //------------------
1518
    /** Logs a debug message under this class's log tag. */
    private static void logd(String msg) {
        Log.d(TAG, msg);
    }
1522
    /** Logs an error message under this class's log tag. */
    private static void loge(String msg) {
        Log.e(TAG, msg);
    }
1526
1527}
1528