AudioTrack.java revision ed36ca3d4f4f8aa0ee2951eab143a3c96b76158b
1/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media;
18
19import java.lang.annotation.Retention;
20import java.lang.annotation.RetentionPolicy;
21import java.lang.ref.WeakReference;
22import java.lang.Math;
23import java.nio.ByteBuffer;
24import java.nio.ByteOrder;
25import java.nio.NioUtils;
26import java.util.Collection;
27
28import android.annotation.IntDef;
29import android.annotation.NonNull;
30import android.app.ActivityThread;
31import android.app.AppOpsManager;
32import android.content.Context;
33import android.os.Handler;
34import android.os.IBinder;
35import android.os.Looper;
36import android.os.Message;
37import android.os.Process;
38import android.os.RemoteException;
39import android.os.ServiceManager;
40import android.util.ArrayMap;
41import android.util.Log;
42
43import com.android.internal.app.IAppOpsService;
44
45
46/**
47 * The AudioTrack class manages and plays a single audio resource for Java applications.
48 * It allows streaming of PCM audio buffers to the audio sink for playback. This is
49 * achieved by "pushing" the data to the AudioTrack object using one of the
50 *  {@link #write(byte[], int, int)}, {@link #write(short[], int, int)},
51 *  and {@link #write(float[], int, int, int)} methods.
52 *
53 * <p>An AudioTrack instance can operate under two modes: static or streaming.<br>
54 * In Streaming mode, the application writes a continuous stream of data to the AudioTrack, using
55 * one of the {@code write()} methods. These are blocking and return when the data has been
56 * transferred from the Java layer to the native layer and queued for playback. The streaming
57 * mode is most useful when playing blocks of audio data that for instance are:
58 *
59 * <ul>
60 *   <li>too big to fit in memory because of the duration of the sound to play,</li>
61 *   <li>too big to fit in memory because of the characteristics of the audio data
62 *         (high sampling rate, bits per sample ...)</li>
63 *   <li>received or generated while previously queued audio is playing.</li>
64 * </ul>
65 *
66 * The static mode should be chosen when dealing with short sounds that fit in memory and
67 * that need to be played with the smallest latency possible. The static mode will
68 * therefore be preferred for UI and game sounds that are played often, and with the
69 * smallest overhead possible.
70 *
71 * <p>Upon creation, an AudioTrack object initializes its associated audio buffer.
72 * The size of this buffer, specified during the construction, determines how long an AudioTrack
73 * can play before running out of data.<br>
74 * For an AudioTrack using the static mode, this size is the maximum size of the sound that can
75 * be played from it.<br>
76 * For the streaming mode, data will be written to the audio sink in chunks of
77 * sizes less than or equal to the total buffer size.
78 *
79 * AudioTrack is not final and thus permits subclasses, but such use is not recommended.
80 */
81public class AudioTrack implements AudioRouting
82{
    //---------------------------------------------------------
    // Constants
    //--------------------
    /** Minimum value for a linear gain or auxiliary effect level.
     *  This value must be exactly equal to 0.0f; do not change it.
     */
    private static final float GAIN_MIN = 0.0f;
    /** Maximum value for a linear gain or auxiliary effect level.
     *  This value must be greater than or equal to 1.0f.
     */
    private static final float GAIN_MAX = 1.0f;

    /** Minimum value for the source sample rate, in Hz */
    private static final int SAMPLE_RATE_HZ_MIN = 4000;
    /** Maximum value for the source sample rate, in Hz */
    private static final int SAMPLE_RATE_HZ_MAX = 192000;

    /** Maximum value for AudioTrack channel count, queried from the native layer.
     * @hide public for MediaCodec only, do not un-hide or change to a numeric literal
     */
    public static final int CHANNEL_COUNT_MAX = native_get_FCC_8();
104
    /** indicates AudioTrack state is stopped */
    public static final int PLAYSTATE_STOPPED = 1;  // matches SL_PLAYSTATE_STOPPED
    /** indicates AudioTrack state is paused */
    public static final int PLAYSTATE_PAUSED  = 2;  // matches SL_PLAYSTATE_PAUSED
    /** indicates AudioTrack state is playing */
    public static final int PLAYSTATE_PLAYING = 3;  // matches SL_PLAYSTATE_PLAYING

    // keep these values in sync with android_media_AudioTrack.cpp
    /**
     * Creation mode where audio data is transferred from Java to the native layer
     * only once before the audio starts playing.
     */
    public static final int MODE_STATIC = 0;
    /**
     * Creation mode where audio data is streamed from Java to the native layer
     * as the audio is playing.
     */
    public static final int MODE_STREAM = 1;

    /** @hide Transfer mode for audio data: {@link #MODE_STATIC} or {@link #MODE_STREAM}. */
    @IntDef({
        MODE_STATIC,
        MODE_STREAM
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface TransferMode {}
131
    /**
     * State of an AudioTrack that was not successfully initialized upon creation.
     */
    public static final int STATE_UNINITIALIZED = 0;
    /**
     * State of an AudioTrack that is ready to be used.
     */
    public static final int STATE_INITIALIZED   = 1;
    /**
     * State of a successfully initialized AudioTrack that uses static data,
     * but that hasn't received that data yet.
     */
    public static final int STATE_NO_STATIC_DATA = 2;

    // Return codes below mirror the corresponding AudioSystem values.
    /**
     * Denotes a successful operation.
     */
    public  static final int SUCCESS                               = AudioSystem.SUCCESS;
    /**
     * Denotes a generic operation failure.
     */
    public  static final int ERROR                                 = AudioSystem.ERROR;
    /**
     * Denotes a failure due to the use of an invalid value.
     */
    public  static final int ERROR_BAD_VALUE                       = AudioSystem.BAD_VALUE;
    /**
     * Denotes a failure due to the improper use of a method.
     */
    public  static final int ERROR_INVALID_OPERATION               = AudioSystem.INVALID_OPERATION;
    /**
     * An error code indicating that the object reporting it is no longer valid and needs to
     * be recreated.
     * @hide
     */
    public  static final int ERROR_DEAD_OBJECT                     = AudioSystem.DEAD_OBJECT;
    /**
     * {@link #getTimestampWithStatus(AudioTimestamp)} is called in STOPPED or FLUSHED state,
     * or immediately after start/ACTIVE.
     * @hide
     */
    public  static final int ERROR_WOULD_BLOCK                     = AudioSystem.WOULD_BLOCK;
174
    // Error codes:
    // to keep in sync with frameworks/base/core/jni/android_media_AudioTrack.cpp
    // (internal setup failures reported by native_setup; never surfaced to callers directly)
    private static final int ERROR_NATIVESETUP_AUDIOSYSTEM         = -16;
    private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK  = -17;
    private static final int ERROR_NATIVESETUP_INVALIDFORMAT       = -18;
    private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE   = -19;
    private static final int ERROR_NATIVESETUP_NATIVEINITFAILED    = -20;

    // Events:
    // to keep in sync with frameworks/av/include/media/AudioTrack.h
    /**
     * Event id denotes when playback head has reached a previously set marker.
     */
    private static final int NATIVE_EVENT_MARKER  = 3;
    /**
     * Event id denotes when previously set update period has elapsed during playback.
     */
    private static final int NATIVE_EVENT_NEW_POS = 4;

    /** Log tag used by this class. */
    private final static String TAG = "android.media.AudioTrack";
195
196
    /** @hide Write mode for audio data: {@link #WRITE_BLOCKING} or {@link #WRITE_NON_BLOCKING}. */
    @IntDef({
        WRITE_BLOCKING,
        WRITE_NON_BLOCKING
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface WriteMode {}

    /**
     * The write mode indicating the write operation will block until all data has been written,
     * to be used as the actual value of the writeMode parameter in
     * {@link #write(byte[], int, int, int)}, {@link #write(short[], int, int, int)},
     * {@link #write(float[], int, int, int)}, {@link #write(ByteBuffer, int, int)}, and
     * {@link #write(ByteBuffer, int, int, long)}.
     */
    public final static int WRITE_BLOCKING = 0;

    /**
     * The write mode indicating the write operation will return immediately after
     * queuing as much audio data for playback as possible without blocking,
     * to be used as the actual value of the writeMode parameter in
     * {@link #write(byte[], int, int, int)}, {@link #write(short[], int, int, int)},
     * {@link #write(float[], int, int, int)}, {@link #write(ByteBuffer, int, int)}, and
     * {@link #write(ByteBuffer, int, int, long)}.
     */
    public final static int WRITE_NON_BLOCKING = 1;
223
    //--------------------------------------------------------------------------
    // Member variables
    //--------------------
    /**
     * Indicates the state of the AudioTrack instance.
     * One of STATE_UNINITIALIZED, STATE_INITIALIZED, or STATE_NO_STATIC_DATA.
     */
    private int mState = STATE_UNINITIALIZED;
    /**
     * Indicates the play state of the AudioTrack instance.
     * One of PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, or PLAYSTATE_PLAYING.
     */
    private int mPlayState = PLAYSTATE_STOPPED;
    /**
     * Lock to ensure mPlayState updates reflect the actual state of the object.
     */
    private final Object mPlayStateLock = new Object();
    /**
     * Sizes of the native audio buffer.
     * These values are set during construction and can be stale.
     * To obtain the current native audio buffer frame count use {@link #getBufferSizeInFrames()}.
     */
    private int mNativeBufferSizeInBytes = 0;
    private int mNativeBufferSizeInFrames = 0;
    /**
     * Handler for events coming from the native code.
     */
    private NativePositionEventHandlerDelegate mEventHandlerDelegate;
    /**
     * Looper associated with the thread that creates the AudioTrack instance.
     */
    private final Looper mInitializationLooper;
    /**
     * The audio data source sampling rate in Hz.
     */
    private int mSampleRate; // initialized by all constructors via audioParamCheck()
    /**
     * The number of audio output channels (1 is mono, 2 is stereo, etc.).
     */
    private int mChannelCount = 1;
    /**
     * The audio channel mask used for calling native AudioTrack
     */
    private int mChannelMask = AudioFormat.CHANNEL_OUT_MONO;

    /**
     * The type of the audio stream to play. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, {@link AudioManager#STREAM_NOTIFICATION}, and
     *   {@link AudioManager#STREAM_DTMF}.
     */
    private int mStreamType = AudioManager.STREAM_MUSIC;

    /**
     * The attributes describing the playback use case; assigned once at construction.
     */
    private final AudioAttributes mAttributes;
    /**
     * The way audio is consumed by the audio sink, one of MODE_STATIC or MODE_STREAM.
     */
    private int mDataLoadMode = MODE_STREAM;
    /**
     * The current channel position mask, as specified on AudioTrack creation.
     * Can be set simultaneously with channel index mask {@link #mChannelIndexMask}.
     * May be set to {@link AudioFormat#CHANNEL_INVALID} if a channel index mask is specified.
     */
    private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
    /**
     * The channel index mask if specified, otherwise 0.
     */
    private int mChannelIndexMask = 0;
    /**
     * The encoding of the audio samples.
     * @see AudioFormat#ENCODING_PCM_8BIT
     * @see AudioFormat#ENCODING_PCM_16BIT
     * @see AudioFormat#ENCODING_PCM_FLOAT
     */
    private int mAudioFormat;   // initialized by all constructors via audioParamCheck()
    /**
     * Audio session ID; may be replaced by the native layer during construction.
     */
    private int mSessionId = AudioSystem.AUDIO_SESSION_ALLOCATE;
    /**
     * Reference to the app-ops service.
     */
    private final IAppOpsService mAppOps;
    /**
     * HW_AV_SYNC track AV Sync Header
     */
    private ByteBuffer mAvSyncHeader = null;
    /**
     * HW_AV_SYNC track audio data bytes remaining to write after current AV sync header
     */
    private int mAvSyncBytesRemaining = 0;
316
    //--------------------------------
    // Used exclusively by native code
    //--------------------
    // NOTE: these fields are accessed from JNI; do not rename or remove them without
    // updating the native side as well.
    /**
     * @hide
     * Accessed by native methods: provides access to C++ AudioTrack object.
     */
    @SuppressWarnings("unused")
    protected long mNativeTrackInJavaObj;
    /**
     * Accessed by native methods: provides access to the JNI data (i.e. resources used by
     * the native AudioTrack object, but not stored in it).
     */
    @SuppressWarnings("unused")
    private long mJniData;
332
333
    //--------------------------------------------------------------------------
    // Constructor, Finalize
    //--------------------
    /**
     * Class constructor.
     * @param streamType the type of the audio stream. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
     * @param sampleRateInHz the initial source sample rate expressed in Hz.
     * @param channelConfig describes the configuration of the audio channels.
     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
     * @param audioFormat the format in which the audio data is represented.
     *   See {@link AudioFormat#ENCODING_PCM_16BIT},
     *   {@link AudioFormat#ENCODING_PCM_8BIT},
     *   and {@link AudioFormat#ENCODING_PCM_FLOAT}.
     * @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
     *   read from for playback. This should be a multiple of the frame size in bytes.
     *   <p> If the track's creation mode is {@link #MODE_STATIC},
     *   this is the maximum length sample, or audio clip, that can be played by this instance.
     *   <p> If the track's creation mode is {@link #MODE_STREAM},
     *   this should be the desired buffer size
     *   for the <code>AudioTrack</code> to satisfy the application's
     *   natural latency requirements.
     *   If <code>bufferSizeInBytes</code> is less than the
     *   minimum buffer size for the output sink, it is automatically increased to the minimum
     *   buffer size.
     *   The method {@link #getBufferSizeInFrames()} returns the
     *   actual size in frames of the native buffer created, which
     *   determines the frequency to write
     *   to the streaming <code>AudioTrack</code> to avoid underrun.
     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
     * @throws java.lang.IllegalArgumentException
     */
    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
            int bufferSizeInBytes, int mode)
    throws IllegalArgumentException {
        // Delegate to the session-aware constructor, letting the system allocate a new session.
        this(streamType, sampleRateInHz, channelConfig, audioFormat,
                bufferSizeInBytes, mode, AudioSystem.AUDIO_SESSION_ALLOCATE);
    }
375
    /**
     * Class constructor with audio session. Use this constructor when the AudioTrack must be
     * attached to a particular audio session. The primary use of the audio session ID is to
     * associate audio effects to a particular instance of AudioTrack: if an audio session ID
     * is provided when creating an AudioEffect, this effect will be applied only to audio tracks
     * and media players in the same session and not to the output mix.
     * When an AudioTrack is created without specifying a session, it will create its own session
     * which can be retrieved by calling the {@link #getAudioSessionId()} method.
     * If a non-zero session ID is provided, this AudioTrack will share effects attached to this
     * session
     * with all other media players or audio tracks in the same session, otherwise a new session
     * will be created for this track if none is supplied.
     * @param streamType the type of the audio stream. See
     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
     * @param sampleRateInHz the initial source sample rate expressed in Hz.
     * @param channelConfig describes the configuration of the audio channels.
     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
     * @param audioFormat the format in which the audio data is represented.
     *   See {@link AudioFormat#ENCODING_PCM_16BIT},
     *   {@link AudioFormat#ENCODING_PCM_8BIT},
     *   and {@link AudioFormat#ENCODING_PCM_FLOAT}.
     * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
     *   from for playback. If using the AudioTrack in streaming mode, you can write data into
     *   this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
     *   this is the maximum size of the sound that will be played for this instance.
     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
     *   smaller than getMinBufferSize() will result in an initialization failure.
     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
     * @param sessionId Id of audio session the AudioTrack must be attached to
     * @throws java.lang.IllegalArgumentException
     */
    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
            int bufferSizeInBytes, int mode, int sessionId)
    throws IllegalArgumentException {
        // mState already == STATE_UNINITIALIZED
        // Delegate to the AudioAttributes/AudioFormat constructor, translating the legacy
        // stream type and raw format parameters into the builder-based representations.
        this((new AudioAttributes.Builder())
                    .setLegacyStreamType(streamType)
                    .build(),
                (new AudioFormat.Builder())
                    .setChannelMask(channelConfig)
                    .setEncoding(audioFormat)
                    .setSampleRate(sampleRateInHz)
                    .build(),
                bufferSizeInBytes,
                mode, sessionId);
    }
426
427    /**
428     * Class constructor with {@link AudioAttributes} and {@link AudioFormat}.
429     * @param attributes a non-null {@link AudioAttributes} instance.
430     * @param format a non-null {@link AudioFormat} instance describing the format of the data
431     *     that will be played through this AudioTrack. See {@link AudioFormat.Builder} for
432     *     configuring the audio format parameters such as encoding, channel mask and sample rate.
433     * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
434     *   from for playback. If using the AudioTrack in streaming mode, you can write data into
435     *   this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
436     *   this is the maximum size of the sound that will be played for this instance.
437     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
438     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
439     *   smaller than getMinBufferSize() will result in an initialization failure.
440     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}.
441     * @param sessionId ID of audio session the AudioTrack must be attached to, or
442     *   {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at construction
443     *   time. See also {@link AudioManager#generateAudioSessionId()} to obtain a session ID before
444     *   construction.
445     * @throws IllegalArgumentException
446     */
447    public AudioTrack(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes,
448            int mode, int sessionId)
449                    throws IllegalArgumentException {
450        // mState already == STATE_UNINITIALIZED
451
452        if (attributes == null) {
453            throw new IllegalArgumentException("Illegal null AudioAttributes");
454        }
455        if (format == null) {
456            throw new IllegalArgumentException("Illegal null AudioFormat");
457        }
458
459        // remember which looper is associated with the AudioTrack instantiation
460        Looper looper;
461        if ((looper = Looper.myLooper()) == null) {
462            looper = Looper.getMainLooper();
463        }
464
465        int rate = 0;
466        if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE) != 0)
467        {
468            rate = format.getSampleRate();
469        } else {
470            rate = AudioSystem.getPrimaryOutputSamplingRate();
471            if (rate <= 0) {
472                rate = 44100;
473            }
474        }
475        int channelIndexMask = 0;
476        if ((format.getPropertySetMask()
477                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
478            channelIndexMask = format.getChannelIndexMask();
479        }
480        int channelMask = 0;
481        if ((format.getPropertySetMask()
482                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
483            channelMask = format.getChannelMask();
484        } else if (channelIndexMask == 0) { // if no masks at all, use stereo
485            channelMask = AudioFormat.CHANNEL_OUT_FRONT_LEFT
486                    | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
487        }
488        int encoding = AudioFormat.ENCODING_DEFAULT;
489        if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0) {
490            encoding = format.getEncoding();
491        }
492        audioParamCheck(rate, channelMask, channelIndexMask, encoding, mode);
493        mStreamType = AudioSystem.STREAM_DEFAULT;
494
495        audioBuffSizeCheck(bufferSizeInBytes);
496
497        mInitializationLooper = looper;
498        IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
499        mAppOps = IAppOpsService.Stub.asInterface(b);
500
501        mAttributes = new AudioAttributes.Builder(attributes).build();
502
503        if (sessionId < 0) {
504            throw new IllegalArgumentException("Invalid audio session ID: "+sessionId);
505        }
506
507        int[] session = new int[1];
508        session[0] = sessionId;
509        // native initialization
510        int initResult = native_setup(new WeakReference<AudioTrack>(this), mAttributes,
511                mSampleRate, mChannelMask, mChannelIndexMask, mAudioFormat,
512                mNativeBufferSizeInBytes, mDataLoadMode, session);
513        if (initResult != SUCCESS) {
514            loge("Error code "+initResult+" when initializing AudioTrack.");
515            return; // with mState == STATE_UNINITIALIZED
516        }
517
518        mSessionId = session[0];
519
520        if (mDataLoadMode == MODE_STATIC) {
521            mState = STATE_NO_STATIC_DATA;
522        } else {
523            mState = STATE_INITIALIZED;
524        }
525    }
526
527    /**
528     * A constructor which explicitly connects a Native (C++) AudioTrack. For use by
529     * the AudioTrackRoutingProxy subclass.
530     * @param nativeTrackInJavaObj a C/C++ pointer to a native AudioTrack
531     * (associated with an OpenSL ES player).
532     */
533    /*package*/ AudioTrack(long nativeTrackInJavaObj) {
534        mNativeTrackInJavaObj = nativeTrackInJavaObj;
535
536        // "final"s
537        mAttributes = null;
538        mAppOps = null;
539
540        // remember which looper is associated with the AudioTrack instantiation
541        Looper looper;
542        if ((looper = Looper.myLooper()) == null) {
543            looper = Looper.getMainLooper();
544        }
545        mInitializationLooper = looper;
546
547        // other initialization...
548
549        mState = STATE_INITIALIZED;
550    }
551
552    /**
553     * Builder class for {@link AudioTrack} objects.
554     * Use this class to configure and create an <code>AudioTrack</code> instance. By setting audio
555     * attributes and audio format parameters, you indicate which of those vary from the default
556     * behavior on the device.
557     * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
558     * parameters, to be used by a new <code>AudioTrack</code> instance:
559     *
560     * <pre class="prettyprint">
561     * AudioTrack player = new AudioTrack.Builder()
562     *         .setAudioAttributes(new AudioAttributes.Builder()
563     *                  .setUsage(AudioAttributes.USAGE_ALARM)
564     *                  .setContentType(CONTENT_TYPE_MUSIC)
565     *                  .build())
566     *         .setAudioFormat(new AudioFormat.Builder()
567     *                 .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
 *                 .setSampleRate(44100)
569     *                 .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
570     *                 .build())
 *         .setBufferSizeInBytes(minBuffSize)
572     *         .build();
573     * </pre>
574     * <p>
575     * If the audio attributes are not set with {@link #setAudioAttributes(AudioAttributes)},
576     * attributes comprising {@link AudioAttributes#USAGE_MEDIA} will be used.
577     * <br>If the audio format is not specified or is incomplete, its sample rate will be the
578     * default output sample rate of the device (see
579     * {@link AudioManager#PROPERTY_OUTPUT_SAMPLE_RATE}), its channel configuration will be
580     * {@link AudioFormat#CHANNEL_OUT_STEREO} and the encoding will be
581     * {@link AudioFormat#ENCODING_PCM_16BIT}.
582     * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
583     * and the mode is {@link AudioTrack#MODE_STREAM}, the minimum buffer size is used.
584     * <br>If the transfer mode is not specified with {@link #setTransferMode(int)},
585     * <code>MODE_STREAM</code> will be used.
586     * <br>If the session ID is not specified with {@link #setSessionId(int)}, a new one will
587     * be generated.
588     */
    public static class Builder {
        // Parameters collected by the setters and consumed when build() is called.
        private AudioAttributes mAttributes;
        private AudioFormat mFormat;
        private int mBufferSizeInBytes;
        // Defaults: generate a new session ID and use streaming transfer mode.
        private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
        private int mMode = MODE_STREAM;
595
        /**
         * Constructs a new Builder with the default values as described above.
         */
        public Builder() {
            // All fields keep their declared defaults.
        }
601
602        /**
603         * Sets the {@link AudioAttributes}.
604         * @param attributes a non-null {@link AudioAttributes} instance that describes the audio
605         *     data to be played.
606         * @return the same Builder instance.
607         * @throws IllegalArgumentException
608         */
609        public @NonNull Builder setAudioAttributes(@NonNull AudioAttributes attributes)
610                throws IllegalArgumentException {
611            if (attributes == null) {
612                throw new IllegalArgumentException("Illegal null AudioAttributes argument");
613            }
614            // keep reference, we only copy the data when building
615            mAttributes = attributes;
616            return this;
617        }
618
619        /**
620         * Sets the format of the audio data to be played by the {@link AudioTrack}.
621         * See {@link AudioFormat.Builder} for configuring the audio format parameters such
622         * as encoding, channel mask and sample rate.
623         * @param format a non-null {@link AudioFormat} instance.
624         * @return the same Builder instance.
625         * @throws IllegalArgumentException
626         */
627        public @NonNull Builder setAudioFormat(@NonNull AudioFormat format)
628                throws IllegalArgumentException {
629            if (format == null) {
630                throw new IllegalArgumentException("Illegal null AudioFormat argument");
631            }
632            // keep reference, we only copy the data when building
633            mFormat = format;
634            return this;
635        }
636
637        /**
638         * Sets the total size (in bytes) of the buffer where audio data is read from for playback.
639         * If using the {@link AudioTrack} in streaming mode
640         * (see {@link AudioTrack#MODE_STREAM}, you can write data into this buffer in smaller
641         * chunks than this size. See {@link #getMinBufferSize(int, int, int)} to determine
642         * the minimum required buffer size for the successful creation of an AudioTrack instance
643         * in streaming mode. Using values smaller than <code>getMinBufferSize()</code> will result
644         * in an exception when trying to build the <code>AudioTrack</code>.
645         * <br>If using the <code>AudioTrack</code> in static mode (see
646         * {@link AudioTrack#MODE_STATIC}), this is the maximum size of the sound that will be
647         * played by this instance.
648         * @param bufferSizeInBytes
649         * @return the same Builder instance.
650         * @throws IllegalArgumentException
651         */
652        public @NonNull Builder setBufferSizeInBytes(int bufferSizeInBytes)
653                throws IllegalArgumentException {
654            if (bufferSizeInBytes <= 0) {
655                throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
656            }
657            mBufferSizeInBytes = bufferSizeInBytes;
658            return this;
659        }
660
661        /**
662         * Sets the mode under which buffers of audio data are transferred from the
663         * {@link AudioTrack} to the framework.
664         * @param mode one of {@link AudioTrack#MODE_STREAM}, {@link AudioTrack#MODE_STATIC}.
665         * @return the same Builder instance.
666         * @throws IllegalArgumentException
667         */
668        public @NonNull Builder setTransferMode(@TransferMode int mode)
669                throws IllegalArgumentException {
670            switch(mode) {
671                case MODE_STREAM:
672                case MODE_STATIC:
673                    mMode = mode;
674                    break;
675                default:
676                    throw new IllegalArgumentException("Invalid transfer mode " + mode);
677            }
678            return this;
679        }
680
681        /**
682         * Sets the session ID the {@link AudioTrack} will be attached to.
683         * @param sessionId a strictly positive ID number retrieved from another
684         *     <code>AudioTrack</code> via {@link AudioTrack#getAudioSessionId()} or allocated by
685         *     {@link AudioManager} via {@link AudioManager#generateAudioSessionId()}, or
686         *     {@link AudioManager#AUDIO_SESSION_ID_GENERATE}.
687         * @return the same Builder instance.
688         * @throws IllegalArgumentException
689         */
690        public @NonNull Builder setSessionId(int sessionId)
691                throws IllegalArgumentException {
692            if ((sessionId != AudioManager.AUDIO_SESSION_ID_GENERATE) && (sessionId < 1)) {
693                throw new IllegalArgumentException("Invalid audio session ID " + sessionId);
694            }
695            mSessionId = sessionId;
696            return this;
697        }
698
699        /**
700         * Builds an {@link AudioTrack} instance initialized with all the parameters set
701         * on this <code>Builder</code>.
702         * @return a new successfully initialized {@link AudioTrack} instance.
703         * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
704         *     were incompatible, or if they are not supported by the device,
705         *     or if the device was not available.
706         */
707        public @NonNull AudioTrack build() throws UnsupportedOperationException {
708            if (mAttributes == null) {
709                mAttributes = new AudioAttributes.Builder()
710                        .setUsage(AudioAttributes.USAGE_MEDIA)
711                        .build();
712            }
713            if (mFormat == null) {
714                mFormat = new AudioFormat.Builder()
715                        .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
716                        .setSampleRate(AudioSystem.getPrimaryOutputSamplingRate())
717                        .setEncoding(AudioFormat.ENCODING_DEFAULT)
718                        .build();
719            }
720            try {
721                // If the buffer size is not specified in streaming mode,
722                // use a single frame for the buffer size and let the
723                // native code figure out the minimum buffer size.
724                if (mMode == MODE_STREAM && mBufferSizeInBytes == 0) {
725                    mBufferSizeInBytes = mFormat.getChannelCount()
726                            * mFormat.getBytesPerSample(mFormat.getEncoding());
727                }
728                final AudioTrack track = new AudioTrack(
729                        mAttributes, mFormat, mBufferSizeInBytes, mMode, mSessionId);
730                if (track.getState() == STATE_UNINITIALIZED) {
731                    // release is not necessary
732                    throw new UnsupportedOperationException("Cannot create AudioTrack");
733                }
734                return track;
735            } catch (IllegalArgumentException e) {
736                throw new UnsupportedOperationException(e.getMessage());
737            }
738        }
739    }
740
    // Mask of all the positional (non-index) output channels supported; the allowed
    // combinations are further restricted by the matching left/right pairing rule and
    // CHANNEL_COUNT_MAX enforced in isMultichannelConfigSupported().
    private static final int SUPPORTED_OUT_CHANNELS =
            AudioFormat.CHANNEL_OUT_FRONT_LEFT |
            AudioFormat.CHANNEL_OUT_FRONT_RIGHT |
            AudioFormat.CHANNEL_OUT_FRONT_CENTER |
            AudioFormat.CHANNEL_OUT_LOW_FREQUENCY |
            AudioFormat.CHANNEL_OUT_BACK_LEFT |
            AudioFormat.CHANNEL_OUT_BACK_RIGHT |
            AudioFormat.CHANNEL_OUT_BACK_CENTER |
            AudioFormat.CHANNEL_OUT_SIDE_LEFT |
            AudioFormat.CHANNEL_OUT_SIDE_RIGHT;
753
    // Convenience method for the constructor's parameter checks.
    // This is where constructor IllegalArgumentException-s are thrown
    // postconditions:
    //    mChannelCount is valid
    //    mChannelMask is valid
    //    mAudioFormat is valid
    //    mSampleRate is valid
    //    mDataLoadMode is valid
    private void audioParamCheck(int sampleRateInHz, int channelConfig, int channelIndexMask,
                                 int audioFormat, int mode) {
        //--------------
        // sample rate, note these values are subject to change
        if (sampleRateInHz < SAMPLE_RATE_HZ_MIN || sampleRateInHz > SAMPLE_RATE_HZ_MAX) {
            throw new IllegalArgumentException(sampleRateInHz
                    + "Hz is not a supported sample rate.");
        }
        mSampleRate = sampleRateInHz;

        //--------------
        // channel config
        mChannelConfiguration = channelConfig;

        switch (channelConfig) {
        case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
        case AudioFormat.CHANNEL_OUT_MONO:
        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
            mChannelCount = 1;
            mChannelMask = AudioFormat.CHANNEL_OUT_MONO;
            break;
        case AudioFormat.CHANNEL_OUT_STEREO:
        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
            mChannelCount = 2;
            mChannelMask = AudioFormat.CHANNEL_OUT_STEREO;
            break;
        default:
            // CHANNEL_INVALID together with a non-zero index mask means the caller is
            // using channel-index configuration only; the count is derived from the
            // index mask further below.
            if (channelConfig == AudioFormat.CHANNEL_INVALID && channelIndexMask != 0) {
                mChannelCount = 0;
                break; // channel index configuration only
            }
            if (!isMultichannelConfigSupported(channelConfig)) {
                // input channel configuration features unsupported channels
                throw new IllegalArgumentException("Unsupported channel configuration.");
            }
            mChannelMask = channelConfig;
            mChannelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
        }
        // check the channel index configuration (if present)
        mChannelIndexMask = channelIndexMask;
        if (mChannelIndexMask != 0) {
            // restrictive: indexMask could allow up to AUDIO_CHANNEL_BITS_LOG2
            final int indexMask = (1 << CHANNEL_COUNT_MAX) - 1;
            if ((channelIndexMask & ~indexMask) != 0) {
                throw new IllegalArgumentException("Unsupported channel index configuration "
                        + channelIndexMask);
            }
            int channelIndexCount = Integer.bitCount(channelIndexMask);
            if (mChannelCount == 0) {
                 // index-only configuration: the index mask alone defines the count
                 mChannelCount = channelIndexCount;
            } else if (mChannelCount != channelIndexCount) {
                // when both a position mask and an index mask are given, they must agree
                throw new IllegalArgumentException("Channel count must match");
            }
        }

        //--------------
        // audio format
        if (audioFormat == AudioFormat.ENCODING_DEFAULT) {
            audioFormat = AudioFormat.ENCODING_PCM_16BIT;
        }

        if (!AudioFormat.isPublicEncoding(audioFormat)) {
            throw new IllegalArgumentException("Unsupported audio encoding.");
        }
        mAudioFormat = audioFormat;

        //--------------
        // audio load mode
        // MODE_STATIC is only valid for linear PCM encodings; compressed encodings
        // must use MODE_STREAM.
        if (((mode != MODE_STREAM) && (mode != MODE_STATIC)) ||
                ((mode != MODE_STREAM) && !AudioFormat.isEncodingLinearPcm(mAudioFormat))) {
            throw new IllegalArgumentException("Invalid mode.");
        }
        mDataLoadMode = mode;
    }
836
837    /**
838     * Convenience method to check that the channel configuration (a.k.a channel mask) is supported
839     * @param channelConfig the mask to validate
840     * @return false if the AudioTrack can't be used with such a mask
841     */
842    private static boolean isMultichannelConfigSupported(int channelConfig) {
843        // check for unsupported channels
844        if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
845            loge("Channel configuration features unsupported channels");
846            return false;
847        }
848        final int channelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
849        if (channelCount > CHANNEL_COUNT_MAX) {
850            loge("Channel configuration contains too many channels " +
851                    channelCount + ">" + CHANNEL_COUNT_MAX);
852            return false;
853        }
854        // check for unsupported multichannel combinations:
855        // - FL/FR must be present
856        // - L/R channels must be paired (e.g. no single L channel)
857        final int frontPair =
858                AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
859        if ((channelConfig & frontPair) != frontPair) {
860                loge("Front channels must be present in multichannel configurations");
861                return false;
862        }
863        final int backPair =
864                AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT;
865        if ((channelConfig & backPair) != 0) {
866            if ((channelConfig & backPair) != backPair) {
867                loge("Rear channels can't be used independently");
868                return false;
869            }
870        }
871        final int sidePair =
872                AudioFormat.CHANNEL_OUT_SIDE_LEFT | AudioFormat.CHANNEL_OUT_SIDE_RIGHT;
873        if ((channelConfig & sidePair) != 0
874                && (channelConfig & sidePair) != sidePair) {
875            loge("Side channels can't be used independently");
876            return false;
877        }
878        return true;
879    }
880
881
882    // Convenience method for the constructor's audio buffer size check.
883    // preconditions:
884    //    mChannelCount is valid
885    //    mAudioFormat is valid
886    // postcondition:
887    //    mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
888    private void audioBuffSizeCheck(int audioBufferSize) {
889        // NB: this section is only valid with PCM data.
890        //     To update when supporting compressed formats
891        int frameSizeInBytes;
892        if (AudioFormat.isEncodingLinearPcm(mAudioFormat)) {
893            frameSizeInBytes = mChannelCount * AudioFormat.getBytesPerSample(mAudioFormat);
894        } else {
895            frameSizeInBytes = 1;
896        }
897        if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
898            throw new IllegalArgumentException("Invalid audio buffer size.");
899        }
900
901        mNativeBufferSizeInBytes = audioBufferSize;
902        mNativeBufferSizeInFrames = audioBufferSize / frameSizeInBytes;
903    }
904
905
906    /**
907     * Releases the native AudioTrack resources.
908     */
909    public void release() {
910        // even though native_release() stops the native AudioTrack, we need to stop
911        // AudioTrack subclasses too.
912        try {
913            stop();
914        } catch(IllegalStateException ise) {
915            // don't raise an exception, we're releasing the resources.
916        }
917        native_release();
918        mState = STATE_UNINITIALIZED;
919    }
920
    @Override
    protected void finalize() {
        // Frees the native resources if release() was never called.
        // NOTE(review): does not chain super.finalize(); presumably fine while the direct
        // superclass finalizer is a no-op — confirm if the class hierarchy changes.
        native_finalize();
    }
925
926    //--------------------------------------------------------------------------
927    // Getters
928    //--------------------
929    /**
930     * Returns the minimum gain value, which is the constant 0.0.
931     * Gain values less than 0.0 will be clamped to 0.0.
932     * <p>The word "volume" in the API name is historical; this is actually a linear gain.
933     * @return the minimum value, which is the constant 0.0.
934     */
935    static public float getMinVolume() {
936        return GAIN_MIN;
937    }
938
939    /**
940     * Returns the maximum gain value, which is greater than or equal to 1.0.
941     * Gain values greater than the maximum will be clamped to the maximum.
942     * <p>The word "volume" in the API name is historical; this is actually a gain.
943     * expressed as a linear multiplier on sample values, where a maximum value of 1.0
944     * corresponds to a gain of 0 dB (sample values left unmodified).
945     * @return the maximum value, which is greater than or equal to 1.0.
946     */
947    static public float getMaxVolume() {
948        return GAIN_MAX;
949    }
950
951    /**
952     * Returns the configured audio data sample rate in Hz
953     */
954    public int getSampleRate() {
955        return mSampleRate;
956    }
957
958    /**
959     * Returns the current playback sample rate rate in Hz.
960     */
961    public int getPlaybackRate() {
962        return native_get_playback_rate();
963    }
964
965    /**
966     * Returns the current playback parameters.
967     * See {@link #setPlaybackParams(PlaybackParams)} to set playback parameters
968     * @return current {@link PlaybackParams}.
969     * @throws IllegalStateException if track is not initialized.
970     */
971    public @NonNull PlaybackParams getPlaybackParams() {
972        return native_get_playback_params();
973    }
974
975    /**
976     * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
977     * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
978     */
979    public int getAudioFormat() {
980        return mAudioFormat;
981    }
982
983    /**
984     * Returns the type of audio stream this AudioTrack is configured for.
985     * Compare the result against {@link AudioManager#STREAM_VOICE_CALL},
986     * {@link AudioManager#STREAM_SYSTEM}, {@link AudioManager#STREAM_RING},
987     * {@link AudioManager#STREAM_MUSIC}, {@link AudioManager#STREAM_ALARM},
988     * {@link AudioManager#STREAM_NOTIFICATION}, or {@link AudioManager#STREAM_DTMF}.
989     */
990    public int getStreamType() {
991        return mStreamType;
992    }
993
994    /**
995     * Returns the configured channel position mask.
996     * <p> For example, refer to {@link AudioFormat#CHANNEL_OUT_MONO},
997     * {@link AudioFormat#CHANNEL_OUT_STEREO}, {@link AudioFormat#CHANNEL_OUT_5POINT1}.
998     * This method may return {@link AudioFormat#CHANNEL_INVALID} if
999     * a channel index mask was used. Consider
1000     * {@link #getFormat()} instead, to obtain an {@link AudioFormat},
1001     * which contains both the channel position mask and the channel index mask.
1002     */
1003    public int getChannelConfiguration() {
1004        return mChannelConfiguration;
1005    }
1006
1007    /**
1008     * Returns the configured <code>AudioTrack</code> format.
1009     * @return an {@link AudioFormat} containing the
1010     * <code>AudioTrack</code> parameters at the time of configuration.
1011     */
1012    public @NonNull AudioFormat getFormat() {
1013        AudioFormat.Builder builder = new AudioFormat.Builder()
1014            .setSampleRate(mSampleRate)
1015            .setEncoding(mAudioFormat);
1016        if (mChannelConfiguration != AudioFormat.CHANNEL_INVALID) {
1017            builder.setChannelMask(mChannelConfiguration);
1018        }
1019        if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) {
1020            builder.setChannelIndexMask(mChannelIndexMask);
1021        }
1022        return builder.build();
1023    }
1024
1025    /**
1026     * Returns the configured number of channels.
1027     */
1028    public int getChannelCount() {
1029        return mChannelCount;
1030    }
1031
1032    /**
1033     * Returns the state of the AudioTrack instance. This is useful after the
1034     * AudioTrack instance has been created to check if it was initialized
1035     * properly. This ensures that the appropriate resources have been acquired.
1036     * @see #STATE_UNINITIALIZED
1037     * @see #STATE_INITIALIZED
1038     * @see #STATE_NO_STATIC_DATA
1039     */
1040    public int getState() {
1041        return mState;
1042    }
1043
1044    /**
1045     * Returns the playback state of the AudioTrack instance.
1046     * @see #PLAYSTATE_STOPPED
1047     * @see #PLAYSTATE_PAUSED
1048     * @see #PLAYSTATE_PLAYING
1049     */
1050    public int getPlayState() {
1051        synchronized (mPlayStateLock) {
1052            return mPlayState;
1053        }
1054    }
1055
1056    /**
1057     *  Returns the frame count of the native <code>AudioTrack</code> buffer.
1058     *  <p> If the track's creation mode is {@link #MODE_STATIC},
1059     *  it is equal to the specified bufferSizeInBytes on construction, converted to frame units.
1060     *  A static track's native frame count will not change.
1061     *  <p> If the track's creation mode is {@link #MODE_STREAM},
1062     *  it is greater than or equal to the specified bufferSizeInBytes converted to frame units.
1063     *  For streaming tracks, this value may be rounded up to a larger value if needed by
1064     *  the target output sink, and
1065     *  if the track is subsequently routed to a different output sink, the native
1066     *  frame count may enlarge to accommodate.
1067     *  <p> If the <code>AudioTrack</code> encoding indicates compressed data,
1068     *  e.g. {@link AudioFormat#ENCODING_AC3}, then the frame count returned is
1069     *  the size of the native <code>AudioTrack</code> buffer in bytes.
1070     *  <p> See also {@link AudioManager#getProperty(String)} for key
1071     *  {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
1072     *  @return current size in frames of the <code>AudioTrack</code> buffer.
1073     *  @throws IllegalStateException
1074     */
1075    public int getBufferSizeInFrames() {
1076        return native_get_native_frame_count();
1077    }
1078
1079    /**
1080     *  Returns the frame count of the native <code>AudioTrack</code> buffer.
1081     *  @return current size in frames of the <code>AudioTrack</code> buffer.
1082     *  @throws IllegalStateException
1083     *  @deprecated Use the identical public method {@link #getBufferSizeInFrames()} instead.
1084     */
1085    @Deprecated
1086    protected int getNativeFrameCount() {
1087        return native_get_native_frame_count();
1088    }
1089
1090    /**
1091     * Returns marker position expressed in frames.
1092     * @return marker position in wrapping frame units similar to {@link #getPlaybackHeadPosition},
1093     * or zero if marker is disabled.
1094     */
1095    public int getNotificationMarkerPosition() {
1096        return native_get_marker_pos();
1097    }
1098
1099    /**
1100     * Returns the notification update period expressed in frames.
1101     * Zero means that no position update notifications are being delivered.
1102     */
1103    public int getPositionNotificationPeriod() {
1104        return native_get_pos_update_period();
1105    }
1106
1107    /**
1108     * Returns the playback head position expressed in frames.
1109     * Though the "int" type is signed 32-bits, the value should be reinterpreted as if it is
1110     * unsigned 32-bits.  That is, the next position after 0x7FFFFFFF is (int) 0x80000000.
1111     * This is a continuously advancing counter.  It will wrap (overflow) periodically,
1112     * for example approximately once every 27:03:11 hours:minutes:seconds at 44.1 kHz.
1113     * It is reset to zero by {@link #flush()}, {@link #reloadStaticData()}, and {@link #stop()}.
1114     * If the track's creation mode is {@link #MODE_STATIC}, the return value indicates
1115     * the total number of frames played since reset,
1116     * <i>not</i> the current offset within the buffer.
1117     */
1118    public int getPlaybackHeadPosition() {
1119        return native_get_position();
1120    }
1121
1122    /**
1123     * Returns this track's estimated latency in milliseconds. This includes the latency due
1124     * to AudioTrack buffer size, AudioMixer (if any) and audio hardware driver.
1125     *
1126     * DO NOT UNHIDE. The existing approach for doing A/V sync has too many problems. We need
1127     * a better solution.
1128     * @hide
1129     */
1130    public int getLatency() {
1131        return native_get_latency();
1132    }
1133
1134    /**
1135     *  Returns the output sample rate in Hz for the specified stream type.
1136     */
1137    static public int getNativeOutputSampleRate(int streamType) {
1138        return native_get_output_sample_rate(streamType);
1139    }
1140
1141    /**
1142     * Returns the minimum buffer size required for the successful creation of an AudioTrack
1143     * object to be created in the {@link #MODE_STREAM} mode. Note that this size doesn't
1144     * guarantee a smooth playback under load, and higher values should be chosen according to
1145     * the expected frequency at which the buffer will be refilled with additional data to play.
1146     * For example, if you intend to dynamically set the source sample rate of an AudioTrack
1147     * to a higher value than the initial source sample rate, be sure to configure the buffer size
1148     * based on the highest planned sample rate.
1149     * @param sampleRateInHz the source sample rate expressed in Hz.
1150     * @param channelConfig describes the configuration of the audio channels.
1151     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
1152     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
1153     * @param audioFormat the format in which the audio data is represented.
1154     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
1155     *   {@link AudioFormat#ENCODING_PCM_8BIT},
1156     *   and {@link AudioFormat#ENCODING_PCM_FLOAT}.
1157     * @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed,
1158     *   or {@link #ERROR} if unable to query for output properties,
1159     *   or the minimum buffer size expressed in bytes.
1160     */
1161    static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
1162        int channelCount = 0;
1163        switch(channelConfig) {
1164        case AudioFormat.CHANNEL_OUT_MONO:
1165        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
1166            channelCount = 1;
1167            break;
1168        case AudioFormat.CHANNEL_OUT_STEREO:
1169        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
1170            channelCount = 2;
1171            break;
1172        default:
1173            if (!isMultichannelConfigSupported(channelConfig)) {
1174                loge("getMinBufferSize(): Invalid channel configuration.");
1175                return ERROR_BAD_VALUE;
1176            } else {
1177                channelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
1178            }
1179        }
1180
1181        if (!AudioFormat.isPublicEncoding(audioFormat)) {
1182            loge("getMinBufferSize(): Invalid audio format.");
1183            return ERROR_BAD_VALUE;
1184        }
1185
1186        // sample rate, note these values are subject to change
1187        if ( (sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX) ) {
1188            loge("getMinBufferSize(): " + sampleRateInHz + " Hz is not a supported sample rate.");
1189            return ERROR_BAD_VALUE;
1190        }
1191
1192        int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
1193        if (size <= 0) {
1194            loge("getMinBufferSize(): error querying hardware");
1195            return ERROR;
1196        }
1197        else {
1198            return size;
1199        }
1200    }
1201
1202    /**
1203     * Returns the audio session ID.
1204     *
1205     * @return the ID of the audio session this AudioTrack belongs to.
1206     */
1207    public int getAudioSessionId() {
1208        return mSessionId;
1209    }
1210
1211   /**
1212    * Poll for a timestamp on demand.
1213    * <p>
1214    * If you need to track timestamps during initial warmup or after a routing or mode change,
1215    * you should request a new timestamp periodically until the reported timestamps
1216    * show that the frame position is advancing, or until it becomes clear that
1217    * timestamps are unavailable for this route.
1218    * <p>
1219    * After the clock is advancing at a stable rate,
1220    * query for a new timestamp approximately once every 10 seconds to once per minute.
1221    * Calling this method more often is inefficient.
1222    * It is also counter-productive to call this method more often than recommended,
1223    * because the short-term differences between successive timestamp reports are not meaningful.
1224    * If you need a high-resolution mapping between frame position and presentation time,
1225    * consider implementing that at application level, based on low-resolution timestamps.
1226    * <p>
1227    * The audio data at the returned position may either already have been
1228    * presented, or may have not yet been presented but is committed to be presented.
1229    * It is not possible to request the time corresponding to a particular position,
1230    * or to request the (fractional) position corresponding to a particular time.
1231    * If you need such features, consider implementing them at application level.
1232    *
1233    * @param timestamp a reference to a non-null AudioTimestamp instance allocated
1234    *        and owned by caller.
1235    * @return true if a timestamp is available, or false if no timestamp is available.
1236    *         If a timestamp if available,
1237    *         the AudioTimestamp instance is filled in with a position in frame units, together
1238    *         with the estimated time when that frame was presented or is committed to
1239    *         be presented.
1240    *         In the case that no timestamp is available, any supplied instance is left unaltered.
1241    *         A timestamp may be temporarily unavailable while the audio clock is stabilizing,
1242    *         or during and immediately after a route change.
1243    *         A timestamp is permanently unavailable for a given route if the route does not support
1244    *         timestamps.  In this case, the approximate frame position can be obtained
1245    *         using {@link #getPlaybackHeadPosition}.
1246    *         However, it may be useful to continue to query for
1247    *         timestamps occasionally, to recover after a route change.
1248    */
1249    // Add this text when the "on new timestamp" API is added:
1250    //   Use if you need to get the most recent timestamp outside of the event callback handler.
1251    public boolean getTimestamp(AudioTimestamp timestamp)
1252    {
1253        if (timestamp == null) {
1254            throw new IllegalArgumentException();
1255        }
1256        // It's unfortunate, but we have to either create garbage every time or use synchronized
1257        long[] longArray = new long[2];
1258        int ret = native_get_timestamp(longArray);
1259        if (ret != SUCCESS) {
1260            return false;
1261        }
1262        timestamp.framePosition = longArray[0];
1263        timestamp.nanoTime = longArray[1];
1264        return true;
1265    }
1266
1267    /**
1268     * Poll for a timestamp on demand.
1269     * <p>
1270     * Same as {@link #getTimestamp(AudioTimestamp)} but with a more useful return code.
1271     *
1272     * @param timestamp a reference to a non-null AudioTimestamp instance allocated
1273     *        and owned by caller.
1274     * @return {@link #SUCCESS} if a timestamp is available
1275     *         {@link #ERROR_WOULD_BLOCK} if called in STOPPED or FLUSHED state, or if called
1276     *         immediately after start/ACTIVE, when the number of frames consumed is less than the
1277     *         overall hardware latency to physical output. In WOULD_BLOCK cases, one might poll
1278     *         again, or use {@link #getPlaybackHeadPosition}, or use 0 position and current time
1279     *         for the timestamp.
1280     *         {@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
1281     *         needs to be recreated.
1282     *         {@link #ERROR_INVALID_OPERATION} if current route does not support
1283     *         timestamps. In this case, the approximate frame position can be obtained
1284     *         using {@link #getPlaybackHeadPosition}.
1285     *
1286     *         The AudioTimestamp instance is filled in with a position in frame units, together
1287     *         with the estimated time when that frame was presented or is committed to
1288     *         be presented.
1289     * @hide
1290     */
1291     // Add this text when the "on new timestamp" API is added:
1292     //   Use if you need to get the most recent timestamp outside of the event callback handler.
1293     public int getTimestampWithStatus(AudioTimestamp timestamp)
1294     {
1295         if (timestamp == null) {
1296             throw new IllegalArgumentException();
1297         }
1298         // It's unfortunate, but we have to either create garbage every time or use synchronized
1299         long[] longArray = new long[2];
1300         int ret = native_get_timestamp(longArray);
1301         timestamp.framePosition = longArray[0];
1302         timestamp.nanoTime = longArray[1];
1303         return ret;
1304     }
1305
1306    //--------------------------------------------------------------------------
1307    // Initialization / configuration
1308    //--------------------
1309    /**
1310     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
1311     * for each periodic playback head position update.
1312     * Notifications will be received in the same thread as the one in which the AudioTrack
1313     * instance was created.
1314     * @param listener
1315     */
1316    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) {
1317        setPlaybackPositionUpdateListener(listener, null);
1318    }
1319
1320    /**
1321     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
1322     * for each periodic playback head position update.
1323     * Use this method to receive AudioTrack events in the Handler associated with another
1324     * thread than the one in which you created the AudioTrack instance.
1325     * @param listener
1326     * @param handler the Handler that will receive the event notification messages.
1327     */
1328    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
1329                                                    Handler handler) {
1330        if (listener != null) {
1331            mEventHandlerDelegate = new NativePositionEventHandlerDelegate(this, listener, handler);
1332        } else {
1333            mEventHandlerDelegate = null;
1334        }
1335    }
1336
1337
1338    private static float clampGainOrLevel(float gainOrLevel) {
1339        if (Float.isNaN(gainOrLevel)) {
1340            throw new IllegalArgumentException();
1341        }
1342        if (gainOrLevel < GAIN_MIN) {
1343            gainOrLevel = GAIN_MIN;
1344        } else if (gainOrLevel > GAIN_MAX) {
1345            gainOrLevel = GAIN_MAX;
1346        }
1347        return gainOrLevel;
1348    }
1349
1350
1351     /**
1352     * Sets the specified left and right output gain values on the AudioTrack.
1353     * <p>Gain values are clamped to the closed interval [0.0, max] where
1354     * max is the value of {@link #getMaxVolume}.
1355     * A value of 0.0 results in zero gain (silence), and
1356     * a value of 1.0 means unity gain (signal unchanged).
1357     * The default value is 1.0 meaning unity gain.
1358     * <p>The word "volume" in the API name is historical; this is actually a linear gain.
1359     * @param leftGain output gain for the left channel.
1360     * @param rightGain output gain for the right channel
1361     * @return error code or success, see {@link #SUCCESS},
1362     *    {@link #ERROR_INVALID_OPERATION}
1363     * @deprecated Applications should use {@link #setVolume} instead, as it
1364     * more gracefully scales down to mono, and up to multi-channel content beyond stereo.
1365     */
1366    public int setStereoVolume(float leftGain, float rightGain) {
1367        if (isRestricted()) {
1368            return SUCCESS;
1369        }
1370        if (mState == STATE_UNINITIALIZED) {
1371            return ERROR_INVALID_OPERATION;
1372        }
1373
1374        leftGain = clampGainOrLevel(leftGain);
1375        rightGain = clampGainOrLevel(rightGain);
1376
1377        native_setVolume(leftGain, rightGain);
1378
1379        return SUCCESS;
1380    }
1381
1382
1383    /**
1384     * Sets the specified output gain value on all channels of this track.
1385     * <p>Gain values are clamped to the closed interval [0.0, max] where
1386     * max is the value of {@link #getMaxVolume}.
1387     * A value of 0.0 results in zero gain (silence), and
1388     * a value of 1.0 means unity gain (signal unchanged).
1389     * The default value is 1.0 meaning unity gain.
1390     * <p>This API is preferred over {@link #setStereoVolume}, as it
1391     * more gracefully scales down to mono, and up to multi-channel content beyond stereo.
1392     * <p>The word "volume" in the API name is historical; this is actually a linear gain.
1393     * @param gain output gain for all channels.
1394     * @return error code or success, see {@link #SUCCESS},
1395     *    {@link #ERROR_INVALID_OPERATION}
1396     */
1397    public int setVolume(float gain) {
1398        return setStereoVolume(gain, gain);
1399    }
1400
1401
1402    /**
1403     * Sets the playback sample rate for this track. This sets the sampling rate at which
1404     * the audio data will be consumed and played back
1405     * (as set by the sampleRateInHz parameter in the
1406     * {@link #AudioTrack(int, int, int, int, int, int)} constructor),
1407     * not the original sampling rate of the
1408     * content. For example, setting it to half the sample rate of the content will cause the
1409     * playback to last twice as long, but will also result in a pitch shift down by one octave.
1410     * The valid sample rate range is from 1 Hz to twice the value returned by
1411     * {@link #getNativeOutputSampleRate(int)}.
1412     * Use {@link #setPlaybackParams(PlaybackParams)} for speed control.
1413     * <p> This method may also be used to repurpose an existing <code>AudioTrack</code>
1414     * for playback of content of differing sample rate,
1415     * but with identical encoding and channel mask.
1416     * @param sampleRateInHz the sample rate expressed in Hz
1417     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1418     *    {@link #ERROR_INVALID_OPERATION}
1419     */
1420    public int setPlaybackRate(int sampleRateInHz) {
1421        if (mState != STATE_INITIALIZED) {
1422            return ERROR_INVALID_OPERATION;
1423        }
1424        if (sampleRateInHz <= 0) {
1425            return ERROR_BAD_VALUE;
1426        }
1427        return native_set_playback_rate(sampleRateInHz);
1428    }
1429
1430
1431    /**
1432     * Sets the playback parameters.
1433     * This method returns failure if it cannot apply the playback parameters.
1434     * One possible cause is that the parameters for speed or pitch are out of range.
1435     * Another possible cause is that the <code>AudioTrack</code> is streaming
1436     * (see {@link #MODE_STREAM}) and the
1437     * buffer size is too small. For speeds greater than 1.0f, the <code>AudioTrack</code> buffer
1438     * on configuration must be larger than the speed multiplied by the minimum size
1439     * {@link #getMinBufferSize(int, int, int)}) to allow proper playback.
1440     * @param params see {@link PlaybackParams}. In particular,
1441     * speed, pitch, and audio mode should be set.
1442     * @throws IllegalArgumentException if the parameters are invalid or not accepted.
1443     * @throws IllegalStateException if track is not initialized.
1444     */
1445    public void setPlaybackParams(@NonNull PlaybackParams params) {
1446        if (params == null) {
1447            throw new IllegalArgumentException("params is null");
1448        }
1449        native_set_playback_params(params);
1450    }
1451
1452
1453    /**
1454     * Sets the position of the notification marker.  At most one marker can be active.
1455     * @param markerInFrames marker position in wrapping frame units similar to
1456     * {@link #getPlaybackHeadPosition}, or zero to disable the marker.
1457     * To set a marker at a position which would appear as zero due to wraparound,
1458     * a workaround is to use a non-zero position near zero, such as -1 or 1.
1459     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1460     *  {@link #ERROR_INVALID_OPERATION}
1461     */
1462    public int setNotificationMarkerPosition(int markerInFrames) {
1463        if (mState == STATE_UNINITIALIZED) {
1464            return ERROR_INVALID_OPERATION;
1465        }
1466        return native_set_marker_pos(markerInFrames);
1467    }
1468
1469
1470    /**
1471     * Sets the period for the periodic notification event.
1472     * @param periodInFrames update period expressed in frames.
1473     * Zero period means no position updates.  A negative period is not allowed.
1474     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
1475     */
1476    public int setPositionNotificationPeriod(int periodInFrames) {
1477        if (mState == STATE_UNINITIALIZED) {
1478            return ERROR_INVALID_OPERATION;
1479        }
1480        return native_set_pos_update_period(periodInFrames);
1481    }
1482
1483
1484    /**
1485     * Sets the playback head position within the static buffer.
1486     * The track must be stopped or paused for the position to be changed,
1487     * and must use the {@link #MODE_STATIC} mode.
1488     * @param positionInFrames playback head position within buffer, expressed in frames.
1489     * Zero corresponds to start of buffer.
1490     * The position must not be greater than the buffer size in frames, or negative.
1491     * Though this method and {@link #getPlaybackHeadPosition()} have similar names,
1492     * the position values have different meanings.
1493     * <br>
1494     * If looping is currently enabled and the new position is greater than or equal to the
1495     * loop end marker, the behavior varies by API level:
1496     * as of {@link android.os.Build.VERSION_CODES#M},
1497     * the looping is first disabled and then the position is set.
1498     * For earlier API levels, the behavior is unspecified.
1499     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1500     *    {@link #ERROR_INVALID_OPERATION}
1501     */
1502    public int setPlaybackHeadPosition(int positionInFrames) {
1503        if (mDataLoadMode == MODE_STREAM || mState == STATE_UNINITIALIZED ||
1504                getPlayState() == PLAYSTATE_PLAYING) {
1505            return ERROR_INVALID_OPERATION;
1506        }
1507        if (!(0 <= positionInFrames && positionInFrames <= mNativeBufferSizeInFrames)) {
1508            return ERROR_BAD_VALUE;
1509        }
1510        return native_set_position(positionInFrames);
1511    }
1512
1513    /**
1514     * Sets the loop points and the loop count. The loop can be infinite.
1515     * Similarly to setPlaybackHeadPosition,
1516     * the track must be stopped or paused for the loop points to be changed,
1517     * and must use the {@link #MODE_STATIC} mode.
1518     * @param startInFrames loop start marker expressed in frames.
1519     * Zero corresponds to start of buffer.
1520     * The start marker must not be greater than or equal to the buffer size in frames, or negative.
1521     * @param endInFrames loop end marker expressed in frames.
1522     * The total buffer size in frames corresponds to end of buffer.
1523     * The end marker must not be greater than the buffer size in frames.
1524     * For looping, the end marker must not be less than or equal to the start marker,
1525     * but to disable looping
1526     * it is permitted for start marker, end marker, and loop count to all be 0.
1527     * If any input parameters are out of range, this method returns {@link #ERROR_BAD_VALUE}.
1528     * If the loop period (endInFrames - startInFrames) is too small for the implementation to
1529     * support,
1530     * {@link #ERROR_BAD_VALUE} is returned.
1531     * The loop range is the interval [startInFrames, endInFrames).
1532     * <br>
1533     * As of {@link android.os.Build.VERSION_CODES#M}, the position is left unchanged,
1534     * unless it is greater than or equal to the loop end marker, in which case
1535     * it is forced to the loop start marker.
1536     * For earlier API levels, the effect on position is unspecified.
1537     * @param loopCount the number of times the loop is looped; must be greater than or equal to -1.
1538     *    A value of -1 means infinite looping, and 0 disables looping.
1539     *    A value of positive N means to "loop" (go back) N times.  For example,
1540     *    a value of one means to play the region two times in total.
1541     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1542     *    {@link #ERROR_INVALID_OPERATION}
1543     */
1544    public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
1545        if (mDataLoadMode == MODE_STREAM || mState == STATE_UNINITIALIZED ||
1546                getPlayState() == PLAYSTATE_PLAYING) {
1547            return ERROR_INVALID_OPERATION;
1548        }
1549        if (loopCount == 0) {
1550            ;   // explicitly allowed as an exception to the loop region range check
1551        } else if (!(0 <= startInFrames && startInFrames < mNativeBufferSizeInFrames &&
1552                startInFrames < endInFrames && endInFrames <= mNativeBufferSizeInFrames)) {
1553            return ERROR_BAD_VALUE;
1554        }
1555        return native_set_loop(startInFrames, endInFrames, loopCount);
1556    }
1557
1558    /**
1559     * Sets the initialization state of the instance. This method was originally intended to be used
1560     * in an AudioTrack subclass constructor to set a subclass-specific post-initialization state.
1561     * However, subclasses of AudioTrack are no longer recommended, so this method is obsolete.
1562     * @param state the state of the AudioTrack instance
1563     * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
1564     */
1565    @Deprecated
1566    protected void setState(int state) {
1567        mState = state;
1568    }
1569
1570
1571    //---------------------------------------------------------
1572    // Transport control methods
1573    //--------------------
1574    /**
1575     * Starts playing an AudioTrack.
1576     * <p>
1577     * If track's creation mode is {@link #MODE_STATIC}, you must have called one of
1578     * the write methods ({@link #write(byte[], int, int)}, {@link #write(byte[], int, int, int)},
1579     * {@link #write(short[], int, int)}, {@link #write(short[], int, int, int)},
1580     * {@link #write(float[], int, int, int)}, or {@link #write(ByteBuffer, int, int)}) prior to
1581     * play().
1582     * <p>
1583     * If the mode is {@link #MODE_STREAM}, you can optionally prime the data path prior to
1584     * calling play(), by writing up to <code>bufferSizeInBytes</code> (from constructor).
1585     * If you don't call write() first, or if you call write() but with an insufficient amount of
1586     * data, then the track will be in underrun state at play().  In this case,
1587     * playback will not actually start playing until the data path is filled to a
1588     * device-specific minimum level.  This requirement for the path to be filled
1589     * to a minimum level is also true when resuming audio playback after calling stop().
1590     * Similarly the buffer will need to be filled up again after
1591     * the track underruns due to failure to call write() in a timely manner with sufficient data.
1592     * For portability, an application should prime the data path to the maximum allowed
1593     * by writing data until the write() method returns a short transfer count.
1594     * This allows play() to start immediately, and reduces the chance of underrun.
1595     *
1596     * @throws IllegalStateException if the track isn't properly initialized
1597     */
1598    public void play()
1599    throws IllegalStateException {
1600        if (mState != STATE_INITIALIZED) {
1601            throw new IllegalStateException("play() called on uninitialized AudioTrack.");
1602        }
1603        if (isRestricted()) {
1604            setVolume(0);
1605        }
1606        synchronized(mPlayStateLock) {
1607            native_start();
1608            mPlayState = PLAYSTATE_PLAYING;
1609        }
1610    }
1611
1612    private boolean isRestricted() {
1613        if ((mAttributes.getAllFlags() & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0) {
1614            return false;
1615        }
1616        try {
1617            final int usage = AudioAttributes.usageForLegacyStreamType(mStreamType);
1618            final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO, usage,
1619                    Process.myUid(), ActivityThread.currentPackageName());
1620            return mode != AppOpsManager.MODE_ALLOWED;
1621        } catch (RemoteException e) {
1622            return false;
1623        }
1624    }
1625
1626    /**
1627     * Stops playing the audio data.
1628     * When used on an instance created in {@link #MODE_STREAM} mode, audio will stop playing
1629     * after the last buffer that was written has been played. For an immediate stop, use
1630     * {@link #pause()}, followed by {@link #flush()} to discard audio data that hasn't been played
1631     * back yet.
1632     * @throws IllegalStateException
1633     */
1634    public void stop()
1635    throws IllegalStateException {
1636        if (mState != STATE_INITIALIZED) {
1637            throw new IllegalStateException("stop() called on uninitialized AudioTrack.");
1638        }
1639
1640        // stop playing
1641        synchronized(mPlayStateLock) {
1642            native_stop();
1643            mPlayState = PLAYSTATE_STOPPED;
1644            mAvSyncHeader = null;
1645            mAvSyncBytesRemaining = 0;
1646        }
1647    }
1648
1649    /**
1650     * Pauses the playback of the audio data. Data that has not been played
1651     * back will not be discarded. Subsequent calls to {@link #play} will play
1652     * this data back. See {@link #flush()} to discard this data.
1653     *
1654     * @throws IllegalStateException
1655     */
1656    public void pause()
1657    throws IllegalStateException {
1658        if (mState != STATE_INITIALIZED) {
1659            throw new IllegalStateException("pause() called on uninitialized AudioTrack.");
1660        }
1661        //logd("pause()");
1662
1663        // pause playback
1664        synchronized(mPlayStateLock) {
1665            native_pause();
1666            mPlayState = PLAYSTATE_PAUSED;
1667        }
1668    }
1669
1670
1671    //---------------------------------------------------------
1672    // Audio data supply
1673    //--------------------
1674
1675    /**
1676     * Flushes the audio data currently queued for playback. Any data that has
1677     * been written but not yet presented will be discarded.  No-op if not stopped or paused,
1678     * or if the track's creation mode is not {@link #MODE_STREAM}.
1679     * <BR> Note that although data written but not yet presented is discarded, there is no
1680     * guarantee that all of the buffer space formerly used by that data
1681     * is available for a subsequent write.
1682     * For example, a call to {@link #write(byte[], int, int)} with <code>sizeInBytes</code>
1683     * less than or equal to the total buffer size
1684     * may return a short actual transfer count.
1685     */
1686    public void flush() {
1687        if (mState == STATE_INITIALIZED) {
1688            // flush the data in native layer
1689            native_flush();
1690            mAvSyncHeader = null;
1691            mAvSyncBytesRemaining = 0;
1692        }
1693
1694    }
1695
1696    /**
1697     * Writes the audio data to the audio sink for playback (streaming mode),
1698     * or copies audio data for later playback (static buffer mode).
1699     * The format specified in the AudioTrack constructor should be
1700     * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
1701     * <p>
1702     * In streaming mode, the write will normally block until all the data has been enqueued for
1703     * playback, and will return a full transfer count.  However, if the track is stopped or paused
1704     * on entry, or another thread interrupts the write by calling stop or pause, or an I/O error
1705     * occurs during the write, then the write may return a short transfer count.
1706     * <p>
1707     * In static buffer mode, copies the data to the buffer starting at offset 0.
1708     * Note that the actual playback of this data might occur after this function returns.
1709     *
1710     * @param audioData the array that holds the data to play.
1711     * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
1712     *    starts.
1713     * @param sizeInBytes the number of bytes to read in audioData after the offset.
1714     * @return zero or the positive number of bytes that were written, or
1715     *    {@link #ERROR_INVALID_OPERATION}
1716     *    if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1717     *    the parameters don't resolve to valid data and indexes, or
1718     *    {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
1719     *    needs to be recreated.
1720     *    The dead object error code is not returned if some data was successfully transferred.
1721     *    In this case, the error is returned at the next write().
1722     *
1723     * This is equivalent to {@link #write(byte[], int, int, int)} with <code>writeMode</code>
1724     * set to  {@link #WRITE_BLOCKING}.
1725     */
1726    public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
1727        return write(audioData, offsetInBytes, sizeInBytes, WRITE_BLOCKING);
1728    }
1729
1730    /**
1731     * Writes the audio data to the audio sink for playback (streaming mode),
1732     * or copies audio data for later playback (static buffer mode).
1733     * The format specified in the AudioTrack constructor should be
1734     * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
1735     * <p>
1736     * In streaming mode, the blocking behavior depends on the write mode.  If the write mode is
1737     * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
1738     * for playback, and will return a full transfer count.  However, if the write mode is
1739     * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
1740     * interrupts the write by calling stop or pause, or an I/O error
1741     * occurs during the write, then the write may return a short transfer count.
1742     * <p>
1743     * In static buffer mode, copies the data to the buffer starting at offset 0,
1744     * and the write mode is ignored.
1745     * Note that the actual playback of this data might occur after this function returns.
1746     *
1747     * @param audioData the array that holds the data to play.
1748     * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
1749     *    starts.
1750     * @param sizeInBytes the number of bytes to read in audioData after the offset.
1751     * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
1752     *     effect in static mode.
1753     *     <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
1754     *         to the audio sink.
1755     *     <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
1756     *     queuing as much audio data for playback as possible without blocking.
1757     * @return zero or the positive number of bytes that were written, or
1758     *    {@link #ERROR_INVALID_OPERATION}
1759     *    if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1760     *    the parameters don't resolve to valid data and indexes, or
1761     *    {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
1762     *    needs to be recreated.
1763     *    The dead object error code is not returned if some data was successfully transferred.
1764     *    In this case, the error is returned at the next write().
1765     */
1766    public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
1767            @WriteMode int writeMode) {
1768
1769        if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
1770            return ERROR_INVALID_OPERATION;
1771        }
1772
1773        if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
1774            Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
1775            return ERROR_BAD_VALUE;
1776        }
1777
1778        if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
1779                || (offsetInBytes + sizeInBytes < 0)    // detect integer overflow
1780                || (offsetInBytes + sizeInBytes > audioData.length)) {
1781            return ERROR_BAD_VALUE;
1782        }
1783
1784        int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat,
1785                writeMode == WRITE_BLOCKING);
1786
1787        if ((mDataLoadMode == MODE_STATIC)
1788                && (mState == STATE_NO_STATIC_DATA)
1789                && (ret > 0)) {
1790            // benign race with respect to other APIs that read mState
1791            mState = STATE_INITIALIZED;
1792        }
1793
1794        return ret;
1795    }
1796
1797    /**
1798     * Writes the audio data to the audio sink for playback (streaming mode),
1799     * or copies audio data for later playback (static buffer mode).
1800     * The format specified in the AudioTrack constructor should be
1801     * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
1802     * <p>
1803     * In streaming mode, the write will normally block until all the data has been enqueued for
1804     * playback, and will return a full transfer count.  However, if the track is stopped or paused
1805     * on entry, or another thread interrupts the write by calling stop or pause, or an I/O error
1806     * occurs during the write, then the write may return a short transfer count.
1807     * <p>
1808     * In static buffer mode, copies the data to the buffer starting at offset 0.
1809     * Note that the actual playback of this data might occur after this function returns.
1810     *
1811     * @param audioData the array that holds the data to play.
1812     * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
1813     *     starts.
1814     * @param sizeInShorts the number of shorts to read in audioData after the offset.
1815     * @return zero or the positive number of shorts that were written, or
1816     *    {@link #ERROR_INVALID_OPERATION}
1817     *    if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1818     *    the parameters don't resolve to valid data and indexes, or
1819     *    {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
1820     *    needs to be recreated.
1821     *    The dead object error code is not returned if some data was successfully transferred.
1822     *    In this case, the error is returned at the next write().
1823     *
1824     * This is equivalent to {@link #write(short[], int, int, int)} with <code>writeMode</code>
1825     * set to  {@link #WRITE_BLOCKING}.
1826     */
1827    public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
1828        return write(audioData, offsetInShorts, sizeInShorts, WRITE_BLOCKING);
1829    }
1830
1831    /**
1832     * Writes the audio data to the audio sink for playback (streaming mode),
1833     * or copies audio data for later playback (static buffer mode).
1834     * The format specified in the AudioTrack constructor should be
1835     * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
1836     * <p>
1837     * In streaming mode, the blocking behavior depends on the write mode.  If the write mode is
1838     * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
1839     * for playback, and will return a full transfer count.  However, if the write mode is
1840     * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
1841     * interrupts the write by calling stop or pause, or an I/O error
1842     * occurs during the write, then the write may return a short transfer count.
1843     * <p>
1844     * In static buffer mode, copies the data to the buffer starting at offset 0.
1845     * Note that the actual playback of this data might occur after this function returns.
1846     *
1847     * @param audioData the array that holds the data to play.
1848     * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
1849     *     starts.
1850     * @param sizeInShorts the number of shorts to read in audioData after the offset.
1851     * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
1852     *     effect in static mode.
1853     *     <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
1854     *         to the audio sink.
1855     *     <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
1856     *     queuing as much audio data for playback as possible without blocking.
1857     * @return zero or the positive number of shorts that were written, or
1858     *    {@link #ERROR_INVALID_OPERATION}
1859     *    if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1860     *    the parameters don't resolve to valid data and indexes, or
1861     *    {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
1862     *    needs to be recreated.
1863     *    The dead object error code is not returned if some data was successfully transferred.
1864     *    In this case, the error is returned at the next write().
1865     */
1866    public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
1867            @WriteMode int writeMode) {
1868
1869        if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
1870            return ERROR_INVALID_OPERATION;
1871        }
1872
1873        if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
1874            Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
1875            return ERROR_BAD_VALUE;
1876        }
1877
1878        if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
1879                || (offsetInShorts + sizeInShorts < 0)  // detect integer overflow
1880                || (offsetInShorts + sizeInShorts > audioData.length)) {
1881            return ERROR_BAD_VALUE;
1882        }
1883
1884        int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat,
1885                writeMode == WRITE_BLOCKING);
1886
1887        if ((mDataLoadMode == MODE_STATIC)
1888                && (mState == STATE_NO_STATIC_DATA)
1889                && (ret > 0)) {
1890            // benign race with respect to other APIs that read mState
1891            mState = STATE_INITIALIZED;
1892        }
1893
1894        return ret;
1895    }
1896
1897    /**
1898     * Writes the audio data to the audio sink for playback (streaming mode),
1899     * or copies audio data for later playback (static buffer mode).
1900     * The format specified in the AudioTrack constructor should be
1901     * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
1902     * <p>
1903     * In streaming mode, the blocking behavior depends on the write mode.  If the write mode is
1904     * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
1905     * for playback, and will return a full transfer count.  However, if the write mode is
1906     * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
1907     * interrupts the write by calling stop or pause, or an I/O error
1908     * occurs during the write, then the write may return a short transfer count.
1909     * <p>
1910     * In static buffer mode, copies the data to the buffer starting at offset 0,
1911     * and the write mode is ignored.
1912     * Note that the actual playback of this data might occur after this function returns.
1913     *
1914     * @param audioData the array that holds the data to play.
1915     *     The implementation does not clip for sample values within the nominal range
1916     *     [-1.0f, 1.0f], provided that all gains in the audio pipeline are
1917     *     less than or equal to unity (1.0f), and in the absence of post-processing effects
1918     *     that could add energy, such as reverb.  For the convenience of applications
1919     *     that compute samples using filters with non-unity gain,
1920     *     sample values +3 dB beyond the nominal range are permitted.
1921     *     However such values may eventually be limited or clipped, depending on various gains
1922     *     and later processing in the audio path.  Therefore applications are encouraged
1923     *     to provide samples values within the nominal range.
1924     * @param offsetInFloats the offset, expressed as a number of floats,
1925     *     in audioData where the data to play starts.
1926     * @param sizeInFloats the number of floats to read in audioData after the offset.
1927     * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
1928     *     effect in static mode.
1929     *     <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
1930     *         to the audio sink.
1931     *     <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
1932     *     queuing as much audio data for playback as possible without blocking.
1933     * @return zero or the positive number of floats that were written, or
1934     *    {@link #ERROR_INVALID_OPERATION}
1935     *    if the track isn't properly initialized, or {@link #ERROR_BAD_VALUE} if
1936     *    the parameters don't resolve to valid data and indexes, or
1937     *    {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
1938     *    needs to be recreated.
1939     *    The dead object error code is not returned if some data was successfully transferred.
1940     *    In this case, the error is returned at the next write().
1941     */
1942    public int write(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
1943            @WriteMode int writeMode) {
1944
1945        if (mState == STATE_UNINITIALIZED) {
1946            Log.e(TAG, "AudioTrack.write() called in invalid state STATE_UNINITIALIZED");
1947            return ERROR_INVALID_OPERATION;
1948        }
1949
1950        if (mAudioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
1951            Log.e(TAG, "AudioTrack.write(float[] ...) requires format ENCODING_PCM_FLOAT");
1952            return ERROR_INVALID_OPERATION;
1953        }
1954
1955        if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
1956            Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
1957            return ERROR_BAD_VALUE;
1958        }
1959
1960        if ( (audioData == null) || (offsetInFloats < 0 ) || (sizeInFloats < 0)
1961                || (offsetInFloats + sizeInFloats < 0)  // detect integer overflow
1962                || (offsetInFloats + sizeInFloats > audioData.length)) {
1963            Log.e(TAG, "AudioTrack.write() called with invalid array, offset, or size");
1964            return ERROR_BAD_VALUE;
1965        }
1966
1967        int ret = native_write_float(audioData, offsetInFloats, sizeInFloats, mAudioFormat,
1968                writeMode == WRITE_BLOCKING);
1969
1970        if ((mDataLoadMode == MODE_STATIC)
1971                && (mState == STATE_NO_STATIC_DATA)
1972                && (ret > 0)) {
1973            // benign race with respect to other APIs that read mState
1974            mState = STATE_INITIALIZED;
1975        }
1976
1977        return ret;
1978    }
1979
1980
1981    /**
1982     * Writes the audio data to the audio sink for playback (streaming mode),
1983     * or copies audio data for later playback (static buffer mode).
1984     * The audioData in ByteBuffer should match the format specified in the AudioTrack constructor.
1985     * <p>
1986     * In streaming mode, the blocking behavior depends on the write mode.  If the write mode is
1987     * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
1988     * for playback, and will return a full transfer count.  However, if the write mode is
1989     * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
1990     * interrupts the write by calling stop or pause, or an I/O error
1991     * occurs during the write, then the write may return a short transfer count.
1992     * <p>
1993     * In static buffer mode, copies the data to the buffer starting at offset 0,
1994     * and the write mode is ignored.
1995     * Note that the actual playback of this data might occur after this function returns.
1996     *
1997     * @param audioData the buffer that holds the data to play, starting at the position reported
1998     *     by <code>audioData.position()</code>.
1999     *     <BR>Note that upon return, the buffer position (<code>audioData.position()</code>) will
2000     *     have been advanced to reflect the amount of data that was successfully written to
2001     *     the AudioTrack.
2002     * @param sizeInBytes number of bytes to write.
2003     *     <BR>Note this may differ from <code>audioData.remaining()</code>, but cannot exceed it.
2004     * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
2005     *     effect in static mode.
2006     *     <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
2007     *         to the audio sink.
2008     *     <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
2009     *     queuing as much audio data for playback as possible without blocking.
2010     * @return zero or the positive number of bytes that were written, or
2011     *     {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}, or
2012     *     {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
2013     *     needs to be recreated.
2014     *     The dead object error code is not returned if some data was successfully transferred.
2015     *     In this case, the error is returned at the next write().
2016     */
2017    public int write(@NonNull ByteBuffer audioData, int sizeInBytes,
2018            @WriteMode int writeMode) {
2019
2020        if (mState == STATE_UNINITIALIZED) {
2021            Log.e(TAG, "AudioTrack.write() called in invalid state STATE_UNINITIALIZED");
2022            return ERROR_INVALID_OPERATION;
2023        }
2024
2025        if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
2026            Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
2027            return ERROR_BAD_VALUE;
2028        }
2029
2030        if ( (audioData == null) || (sizeInBytes < 0) || (sizeInBytes > audioData.remaining())) {
2031            Log.e(TAG, "AudioTrack.write() called with invalid size (" + sizeInBytes + ") value");
2032            return ERROR_BAD_VALUE;
2033        }
2034
2035        int ret = 0;
2036        if (audioData.isDirect()) {
2037            ret = native_write_native_bytes(audioData,
2038                    audioData.position(), sizeInBytes, mAudioFormat,
2039                    writeMode == WRITE_BLOCKING);
2040        } else {
2041            ret = native_write_byte(NioUtils.unsafeArray(audioData),
2042                    NioUtils.unsafeArrayOffset(audioData) + audioData.position(),
2043                    sizeInBytes, mAudioFormat,
2044                    writeMode == WRITE_BLOCKING);
2045        }
2046
2047        if ((mDataLoadMode == MODE_STATIC)
2048                && (mState == STATE_NO_STATIC_DATA)
2049                && (ret > 0)) {
2050            // benign race with respect to other APIs that read mState
2051            mState = STATE_INITIALIZED;
2052        }
2053
2054        if (ret > 0) {
2055            audioData.position(audioData.position() + ret);
2056        }
2057
2058        return ret;
2059    }
2060
2061    /**
2062     * Writes the audio data to the audio sink for playback in streaming mode on a HW_AV_SYNC track.
2063     * The blocking behavior will depend on the write mode.
2064     * @param audioData the buffer that holds the data to play, starting at the position reported
2065     *     by <code>audioData.position()</code>.
2066     *     <BR>Note that upon return, the buffer position (<code>audioData.position()</code>) will
2067     *     have been advanced to reflect the amount of data that was successfully written to
2068     *     the AudioTrack.
2069     * @param sizeInBytes number of bytes to write.
2070     *     <BR>Note this may differ from <code>audioData.remaining()</code>, but cannot exceed it.
2071     * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}.
2072     *     <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
2073     *         to the audio sink.
2074     *     <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
2075     *     queuing as much audio data for playback as possible without blocking.
2076     * @param timestamp The timestamp of the first decodable audio frame in the provided audioData.
2077     * @return zero or a positive number of bytes that were written, or
2078     *     {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}, or
2079     *     {@link AudioManager#ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
2080     *     needs to be recreated.
2081     *     The dead object error code is not returned if some data was successfully transferred.
2082     *     In this case, the error is returned at the next write().
2083     */
    public int write(@NonNull ByteBuffer audioData, int sizeInBytes,
            @WriteMode int writeMode, long timestamp) {

        if (mState == STATE_UNINITIALIZED) {
            Log.e(TAG, "AudioTrack.write() called in invalid state STATE_UNINITIALIZED");
            return ERROR_INVALID_OPERATION;
        }

        if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
            Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
            return ERROR_BAD_VALUE;
        }

        // Timestamps are only meaningful for streaming data.
        if (mDataLoadMode != MODE_STREAM) {
            Log.e(TAG, "AudioTrack.write() with timestamp called for non-streaming mode track");
            return ERROR_INVALID_OPERATION;
        }

        // Without HW A/V sync, the timestamp cannot be conveyed to the audio HAL;
        // degrade gracefully to a plain write that ignores the pts.
        if ((mAttributes.getFlags() & AudioAttributes.FLAG_HW_AV_SYNC) == 0) {
            Log.d(TAG, "AudioTrack.write() called on a regular AudioTrack. Ignoring pts...");
            return write(audioData, sizeInBytes, writeMode);
        }

        if ((audioData == null) || (sizeInBytes < 0) || (sizeInBytes > audioData.remaining())) {
            Log.e(TAG, "AudioTrack.write() called with invalid size (" + sizeInBytes + ") value");
            return ERROR_BAD_VALUE;
        }

        // create timestamp header if none exists
        // Header layout (16 bytes, big-endian): magic 0x55550001, chunk size in bytes,
        // 64-bit timestamp.  A non-null mAvSyncHeader means the previous chunk (header
        // and/or payload) is not fully written yet, so this call continues that chunk
        // rather than starting a new one.
        if (mAvSyncHeader == null) {
            mAvSyncHeader = ByteBuffer.allocate(16);
            mAvSyncHeader.order(ByteOrder.BIG_ENDIAN);
            mAvSyncHeader.putInt(0x55550001);
            mAvSyncHeader.putInt(sizeInBytes);
            mAvSyncHeader.putLong(timestamp);
            mAvSyncHeader.position(0);
            mAvSyncBytesRemaining = sizeInBytes;
        }

        // write timestamp header if not completely written already
        int ret = 0;
        if (mAvSyncHeader.remaining() != 0) {
            ret = write(mAvSyncHeader, mAvSyncHeader.remaining(), writeMode);
            if (ret < 0) {
                // Abandon the current chunk on error; next call starts a fresh header.
                Log.e(TAG, "AudioTrack.write() could not write timestamp header!");
                mAvSyncHeader = null;
                mAvSyncBytesRemaining = 0;
                return ret;
            }
            if (mAvSyncHeader.remaining() > 0) {
                // Only part of the header fit (possible in non-blocking mode); report
                // zero payload bytes written so the caller retries with the same data.
                Log.v(TAG, "AudioTrack.write() partial timestamp header written.");
                return 0;
            }
        }

        // write audio data
        // Never write more payload than the current chunk declared in its header.
        int sizeToWrite = Math.min(mAvSyncBytesRemaining, sizeInBytes);
        ret = write(audioData, sizeToWrite, writeMode);
        if (ret < 0) {
            Log.e(TAG, "AudioTrack.write() could not write audio data!");
            mAvSyncHeader = null;
            mAvSyncBytesRemaining = 0;
            return ret;
        }

        mAvSyncBytesRemaining -= ret;
        // Chunk complete: the next write() starts a new header with a fresh timestamp.
        if (mAvSyncBytesRemaining == 0) {
            mAvSyncHeader = null;
        }

        return ret;
    }
2156
2157
2158    /**
2159     * Sets the playback head position within the static buffer to zero,
     * that is, it rewinds to the start of the static buffer.
2161     * The track must be stopped or paused, and
2162     * the track's creation mode must be {@link #MODE_STATIC}.
2163     * <p>
2164     * As of {@link android.os.Build.VERSION_CODES#M}, also resets the value returned by
2165     * {@link #getPlaybackHeadPosition()} to zero.
2166     * For earlier API levels, the reset behavior is unspecified.
2167     * <p>
2168     * Use {@link #setPlaybackHeadPosition(int)} with a zero position
2169     * if the reset of <code>getPlaybackHeadPosition()</code> is not needed.
2170     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
2171     *  {@link #ERROR_INVALID_OPERATION}
2172     */
2173    public int reloadStaticData() {
2174        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED) {
2175            return ERROR_INVALID_OPERATION;
2176        }
2177        return native_reload_static();
2178    }
2179
2180    //--------------------------------------------------------------------------
2181    // Audio effects management
2182    //--------------------
2183
2184    /**
2185     * Attaches an auxiliary effect to the audio track. A typical auxiliary
2186     * effect is a reverberation effect which can be applied on any sound source
2187     * that directs a certain amount of its energy to this effect. This amount
2188     * is defined by setAuxEffectSendLevel().
     * See {@link #setAuxEffectSendLevel(float)}.
2190     * <p>After creating an auxiliary effect (e.g.
2191     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
2192     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling
2193     * this method to attach the audio track to the effect.
2194     * <p>To detach the effect from the audio track, call this method with a
2195     * null effect id.
2196     *
2197     * @param effectId system wide unique id of the effect to attach
2198     * @return error code or success, see {@link #SUCCESS},
2199     *    {@link #ERROR_INVALID_OPERATION}, {@link #ERROR_BAD_VALUE}
2200     */
2201    public int attachAuxEffect(int effectId) {
2202        if (mState == STATE_UNINITIALIZED) {
2203            return ERROR_INVALID_OPERATION;
2204        }
2205        return native_attachAuxEffect(effectId);
2206    }
2207
2208    /**
2209     * Sets the send level of the audio track to the attached auxiliary effect
2210     * {@link #attachAuxEffect(int)}.  Effect levels
2211     * are clamped to the closed interval [0.0, max] where
2212     * max is the value of {@link #getMaxVolume}.
2213     * A value of 0.0 results in no effect, and a value of 1.0 is full send.
2214     * <p>By default the send level is 0.0f, so even if an effect is attached to the player
2215     * this method must be called for the effect to be applied.
2216     * <p>Note that the passed level value is a linear scalar. UI controls should be scaled
2217     * logarithmically: the gain applied by audio framework ranges from -72dB to at least 0dB,
2218     * so an appropriate conversion from linear UI input x to level is:
2219     * x == 0 -&gt; level = 0
2220     * 0 &lt; x &lt;= R -&gt; level = 10^(72*(x-R)/20/R)
2221     *
2222     * @param level linear send level
2223     * @return error code or success, see {@link #SUCCESS},
2224     *    {@link #ERROR_INVALID_OPERATION}, {@link #ERROR}
2225     */
2226    public int setAuxEffectSendLevel(float level) {
2227        if (isRestricted()) {
2228            return SUCCESS;
2229        }
2230        if (mState == STATE_UNINITIALIZED) {
2231            return ERROR_INVALID_OPERATION;
2232        }
2233        level = clampGainOrLevel(level);
2234        int err = native_setAuxEffectSendLevel(level);
2235        return err == 0 ? SUCCESS : ERROR;
2236    }
2237
2238    //--------------------------------------------------------------------------
2239    // Explicit Routing
2240    //--------------------
    // Device explicitly requested via setPreferredDevice(); guarded by synchronized (this).
    private AudioDeviceInfo mPreferredDevice = null;
2242
2243    /**
2244     * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
2245     * the output from this AudioTrack.
2246     * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio sink.
2247     *  If deviceInfo is null, default routing is restored.
2248     * @return true if succesful, false if the specified {@link AudioDeviceInfo} is non-null and
2249     * does not correspond to a valid audio output device.
2250     */
2251    public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
2252        // Do some validation....
2253        if (deviceInfo != null && !deviceInfo.isSink()) {
2254            return false;
2255        }
2256        int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
2257        boolean status = native_setOutputDevice(preferredDeviceId);
2258        if (status == true) {
2259            synchronized (this) {
2260                mPreferredDevice = deviceInfo;
2261            }
2262        }
2263        return status;
2264    }
2265
2266    /**
2267     * Returns the selected output specified by {@link #setPreferredDevice}. Note that this
2268     * is not guaranteed to correspond to the actual device being used for playback.
2269     */
2270    public AudioDeviceInfo getPreferredDevice() {
2271        synchronized (this) {
2272            return mPreferredDevice;
2273        }
2274    }
2275
2276    /**
2277     * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioTrack.
2278     * Note: The query is only valid if the AudioTrack is currently playing. If it is not,
2279     * <code>getRoutedDevice()</code> will return null.
2280     */
2281    public AudioDeviceInfo getRoutedDevice() {
2282        int deviceId = native_getRoutedDeviceId();
2283        if (deviceId == 0) {
2284            return null;
2285        }
2286        AudioDeviceInfo[] devices =
2287                AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_OUTPUTS);
2288        for (int i = 0; i < devices.length; i++) {
2289            if (devices[i].getId() == deviceId) {
2290                return devices[i];
2291            }
2292        }
2293        return null;
2294    }
2295
    /*
     * Call BEFORE adding a routing callback handler.
     */
    private void testEnableNativeRoutingCallbacks() {
        // Enable native device-change callbacks when the very first routing listener
        // (of either the deprecated or the >= "N" API) is about to be registered.
        // NOTE(review): reads the size of both listener maps while callers hold at most
        // one of the two map locks — confirm this cannot race with the other map.
        if (mRoutingChangeListeners.size() == 0 && mNewRoutingChangeListeners.size() == 0) {
            native_enableDeviceCallback();
        }
    }
2304
    /*
     * Call AFTER removing a routing callback handler.
     */
    private void testDisableNativeRoutingCallbacks() {
        // Disable native device-change callbacks once no routing listener of either
        // API generation remains registered.
        // NOTE(review): same cross-map size check as testEnableNativeRoutingCallbacks()
        // — confirm the callers' locking covers both maps.
        if (mRoutingChangeListeners.size() == 0 && mNewRoutingChangeListeners.size() == 0) {
            native_disableDeviceCallback();
        }
    }
2313
2314    //--------------------------------------------------------------------------
2315    // >= "N" (Re)Routing Info
2316    //--------------------
2317    /**
2318     * The list of AudioRouting.OnRoutingChangedListener interfaces added (with
2319     * {@link AudioTrack#addOnRoutingListener(AudioRouting.OnRoutingChangedListener,
2320     *          android.os.Handler)}
2321     * by an app to receive (re)routing notifications.
2322     */
   // Maps each >= "N" API listener to the delegate that dispatches native routing
   // events onto the listener's Handler; guarded by synchronized (mNewRoutingChangeListeners).
   private ArrayMap<AudioRouting.OnRoutingChangedListener, NativeNewRoutingEventHandlerDelegate>
    mNewRoutingChangeListeners =
        new ArrayMap<AudioRouting.OnRoutingChangedListener, NativeNewRoutingEventHandlerDelegate>();
2326
2327   /**
2328    * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of routing
2329    * changes on this AudioTrack.
2330    * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
2331    * notifications of rerouting events.
2332    * @param handler  Specifies the {@link Handler} object for the thread on which to execute
2333    * the callback. If <code>null</code>, the {@link Handler} associated with the main
2334    * {@link Looper} will be used.
2335    */
2336    public void addOnRoutingListener(AudioRouting.OnRoutingChangedListener listener,
2337            Handler handler) {
2338        if (listener != null && !mNewRoutingChangeListeners.containsKey(listener)) {
2339            synchronized (mNewRoutingChangeListeners) {
2340                testEnableNativeRoutingCallbacks();
2341                mNewRoutingChangeListeners.put(
2342                    listener, new NativeNewRoutingEventHandlerDelegate(this, listener,
2343                            handler != null ? handler : new Handler(mInitializationLooper)));
2344            }
2345        }
2346    }
2347
2348    /**
2349     * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
2350     * to receive rerouting notifications.
2351     * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
2352     * to remove.
2353     */
2354    public void removeOnRoutingListener(AudioRouting.OnRoutingChangedListener listener) {
2355        if (mNewRoutingChangeListeners.containsKey(listener)) {
2356            mNewRoutingChangeListeners.remove(listener);
2357        }
2358        testDisableNativeRoutingCallbacks();
2359    }
2360
2361    //--------------------------------------------------------------------------
2362    // Marshmallow (Re)Routing Info
2363    //--------------------
2364    /**
2365     * Defines the interface by which applications can receive notifications of routing
2366     * changes for the associated {@link AudioTrack}.
2367     */
    @Deprecated
    public interface OnRoutingChangedListener {
        /**
         * Called when the routing of an AudioTrack changes from either an explicit or
         * policy rerouting.  Use {@link #getRoutedDevice()} to retrieve the newly routed-to
         * device.
         */
        @Deprecated
        public void onRoutingChanged(AudioTrack audioTrack);
    }
2378
2379    /**
2380     * The list of AudioTrack.OnRoutingChangedListener interfaces added (with
2381     * {@link AudioTrack#addOnRoutingChangedListener(OnRoutingChangedListener, android.os.Handler)}
2382     * by an app to receive (re)routing notifications.
2383     */
    // Maps each deprecated-API listener to the delegate that dispatches native routing
    // events onto the listener's Handler; guarded by synchronized (mRoutingChangeListeners).
    private ArrayMap<OnRoutingChangedListener, NativeRoutingEventHandlerDelegate>
        mRoutingChangeListeners =
            new ArrayMap<OnRoutingChangedListener, NativeRoutingEventHandlerDelegate>();
2387
2388    /**
2389     * Adds an {@link OnRoutingChangedListener} to receive notifications of routing changes
2390     * on this AudioTrack.
2391     * @param listener The {@link OnRoutingChangedListener} interface to receive notifications
2392     * of rerouting events.
2393     * @param handler  Specifies the {@link Handler} object for the thread on which to execute
2394     * the callback. If <code>null</code>, the {@link Handler} associated with the main
2395     * {@link Looper} will be used.
2396     */
2397    @Deprecated
2398    public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
2399            android.os.Handler handler) {
2400        if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
2401            synchronized (mRoutingChangeListeners) {
2402                testEnableNativeRoutingCallbacks();
2403                mRoutingChangeListeners.put(
2404                    listener, new NativeRoutingEventHandlerDelegate(this, listener,
2405                            handler != null ? handler : new Handler(mInitializationLooper)));
2406            }
2407        }
2408    }
2409
2410    /**
2411     * Removes an {@link OnRoutingChangedListener} which has been previously added
2412     * to receive rerouting notifications.
2413     * @param listener The previously added {@link OnRoutingChangedListener} interface to remove.
2414     */
2415    @Deprecated
2416    public void removeOnRoutingChangedListener(OnRoutingChangedListener listener) {
2417        synchronized (mRoutingChangeListeners) {
2418            if (mRoutingChangeListeners.containsKey(listener)) {
2419                mRoutingChangeListeners.remove(listener);
2420            }
2421            testDisableNativeRoutingCallbacks();
2422        }
2423    }
2424
2425    /**
2426     * Sends device list change notification to all listeners.
2427     */
2428    private void broadcastRoutingChange() {
2429        AudioManager.resetAudioPortGeneration();
2430
2431        // Marshmallow Routing
2432        Collection<NativeRoutingEventHandlerDelegate> values;
2433        synchronized (mRoutingChangeListeners) {
2434            values = mRoutingChangeListeners.values();
2435        }
2436        for(NativeRoutingEventHandlerDelegate delegate : values) {
2437            Handler handler = delegate.getHandler();
2438            if (handler != null) {
2439                handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
2440            }
2441        }
2442        // >= "N" Routing
2443        Collection<NativeNewRoutingEventHandlerDelegate> newValues;
2444        synchronized (mNewRoutingChangeListeners) {
2445            newValues = mNewRoutingChangeListeners.values();
2446        }
2447        for(NativeNewRoutingEventHandlerDelegate delegate : newValues) {
2448            Handler handler = delegate.getHandler();
2449            if (handler != null) {
2450                handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
2451            }
2452        }
2453    }
2454
2455    //---------------------------------------------------------
2456    // Interface definitions
2457    //--------------------
2458    /**
2459     * Interface definition for a callback to be invoked when the playback head position of
2460     * an AudioTrack has reached a notification marker or has increased by a certain period.
2461     */
    public interface OnPlaybackPositionUpdateListener  {
        /**
         * Called on the listener to notify it that the previously set marker has been reached
         * by the playback head.
         * @param track the AudioTrack whose playback head reached the marker
         */
        void onMarkerReached(AudioTrack track);

        /**
         * Called on the listener to periodically notify it that the playback head has reached
         * a multiple of the notification period.
         * @param track the AudioTrack whose playback head crossed a period boundary
         */
        void onPeriodicNotification(AudioTrack track);
    }
2475
2476    //---------------------------------------------------------
2477    // Inner classes
2478    //--------------------
2479    /**
2480     * Helper class to handle the forwarding of native events to the appropriate listener
2481     * (potentially) handled in a different thread
2482     */
2483    private class NativePositionEventHandlerDelegate {
2484        private final Handler mHandler;
2485
2486        NativePositionEventHandlerDelegate(final AudioTrack track,
2487                                   final OnPlaybackPositionUpdateListener listener,
2488                                   Handler handler) {
2489            // find the looper for our new event handler
2490            Looper looper;
2491            if (handler != null) {
2492                looper = handler.getLooper();
2493            } else {
2494                // no given handler, use the looper the AudioTrack was created in
2495                looper = mInitializationLooper;
2496            }
2497
2498            // construct the event handler with this looper
2499            if (looper != null) {
2500                // implement the event handler delegate
2501                mHandler = new Handler(looper) {
2502                    @Override
2503                    public void handleMessage(Message msg) {
2504                        if (track == null) {
2505                            return;
2506                        }
2507                        switch(msg.what) {
2508                        case NATIVE_EVENT_MARKER:
2509                            if (listener != null) {
2510                                listener.onMarkerReached(track);
2511                            }
2512                            break;
2513                        case NATIVE_EVENT_NEW_POS:
2514                            if (listener != null) {
2515                                listener.onPeriodicNotification(track);
2516                            }
2517                            break;
2518                        default:
2519                            loge("Unknown native event type: " + msg.what);
2520                            break;
2521                        }
2522                    }
2523                };
2524            } else {
2525                mHandler = null;
2526            }
2527        }
2528
2529        Handler getHandler() {
2530            return mHandler;
2531        }
2532    }
2533
2534    /**
2535     * Marshmallow Routing API.
2536     * Helper class to handle the forwarding of native events to the appropriate listener
2537     * (potentially) handled in a different thread
2538     */
2539    private class NativeRoutingEventHandlerDelegate {
2540        private final Handler mHandler;
2541
2542        NativeRoutingEventHandlerDelegate(final AudioTrack track,
2543                                   final OnRoutingChangedListener listener,
2544                                   Handler handler) {
2545            // find the looper for our new event handler
2546            Looper looper;
2547            if (handler != null) {
2548                looper = handler.getLooper();
2549            } else {
2550                // no given handler, use the looper the AudioTrack was created in
2551                looper = mInitializationLooper;
2552            }
2553
2554            // construct the event handler with this looper
2555            if (looper != null) {
2556                // implement the event handler delegate
2557                mHandler = new Handler(looper) {
2558                    @Override
2559                    public void handleMessage(Message msg) {
2560                        if (track == null) {
2561                            return;
2562                        }
2563                        switch(msg.what) {
2564                        case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
2565                            if (listener != null) {
2566                                listener.onRoutingChanged(track);
2567                            }
2568                            break;
2569                        default:
2570                            loge("Unknown native event type: " + msg.what);
2571                            break;
2572                        }
2573                    }
2574                };
2575            } else {
2576                mHandler = null;
2577            }
2578        }
2579
2580        Handler getHandler() {
2581            return mHandler;
2582        }
2583    }
2584
2585    /**
2586     * Marshmallow Routing API.
2587     * Helper class to handle the forwarding of native events to the appropriate listener
2588     * (potentially) handled in a different thread
2589     */
2590    private class NativeNewRoutingEventHandlerDelegate {
2591        private final Handler mHandler;
2592
2593        NativeNewRoutingEventHandlerDelegate(final AudioTrack track,
2594                                   final AudioRouting.OnRoutingChangedListener listener,
2595                                   Handler handler) {
2596            // find the looper for our new event handler
2597            Looper looper;
2598            if (handler != null) {
2599                looper = handler.getLooper();
2600            } else {
2601                // no given handler, use the looper the AudioTrack was created in
2602                looper = mInitializationLooper;
2603            }
2604
2605            // construct the event handler with this looper
2606            if (looper != null) {
2607                // implement the event handler delegate
2608                mHandler = new Handler(looper) {
2609                    @Override
2610                    public void handleMessage(Message msg) {
2611                        if (track == null) {
2612                            return;
2613                        }
2614                        switch(msg.what) {
2615                        case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
2616                            if (listener != null) {
2617                                listener.onRoutingChanged(track);
2618                            }
2619                            break;
2620                        default:
2621                            loge("Unknown native event type: " + msg.what);
2622                            break;
2623                        }
2624                    }
2625                };
2626            } else {
2627                mHandler = null;
2628            }
2629        }
2630
2631        Handler getHandler() {
2632            return mHandler;
2633        }
2634    }
2635
2636    //---------------------------------------------------------
2637    // Java methods called from the native side
2638    //--------------------
2639    @SuppressWarnings("unused")
2640    private static void postEventFromNative(Object audiotrack_ref,
2641            int what, int arg1, int arg2, Object obj) {
2642        //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
2643        AudioTrack track = (AudioTrack)((WeakReference)audiotrack_ref).get();
2644        if (track == null) {
2645            return;
2646        }
2647
2648        if (what == AudioSystem.NATIVE_EVENT_ROUTING_CHANGE) {
2649            track.broadcastRoutingChange();
2650            return;
2651        }
2652        NativePositionEventHandlerDelegate delegate = track.mEventHandlerDelegate;
2653        if (delegate != null) {
2654            Handler handler = delegate.getHandler();
2655            if (handler != null) {
2656                Message m = handler.obtainMessage(what, arg1, arg2, obj);
2657                handler.sendMessage(m);
2658            }
2659        }
2660    }
2661
2662
2663    //---------------------------------------------------------
2664    // Native methods called from the Java side
2665    //--------------------
2666
    // post-condition: mStreamType is overwritten with a value
    //     that reflects the audio attributes (e.g. an AudioAttributes object with a usage of
    //     AudioAttributes.USAGE_MEDIA will map to AudioManager.STREAM_MUSIC)
    // Creates and initializes the native AudioTrack peer.
    // audiotrack_this is a WeakReference<AudioTrack> so native callbacks do not keep
    // the Java object alive; sessionId is a length-1 in/out array (element [0] holds
    // the session id) — TODO confirm in/out semantics against the JNI implementation.
    private native final int native_setup(Object /*WeakReference<AudioTrack>*/ audiotrack_this,
            Object /*AudioAttributes*/ attributes,
            int sampleRate, int channelMask, int channelIndexMask, int audioFormat,
            int buffSizeInBytes, int mode, int[] sessionId);

    // Native-side teardown: finalize is driven by GC, release by an explicit call.
    private native final void native_finalize();

    private native final void native_release();

    // Transport control for the native track.
    private native final void native_start();

    private native final void native_stop();

    private native final void native_pause();

    private native final void native_flush();

    // Writes of audio data in the given encoding ("format"); isBlocking selects
    // blocking vs. non-blocking behavior. Return value is a count written or a
    // negative error code (matches the public write() contracts).
    private native final int native_write_byte(byte[] audioData,
                                               int offsetInBytes, int sizeInBytes, int format,
                                               boolean isBlocking);

    private native final int native_write_short(short[] audioData,
                                                int offsetInShorts, int sizeInShorts, int format,
                                                boolean isBlocking);

    private native final int native_write_float(float[] audioData,
                                                int offsetInFloats, int sizeInFloats, int format,
                                                boolean isBlocking);

    // audioData is a direct ByteBuffer; positionInBytes is the buffer position.
    private native final int native_write_native_bytes(Object audioData,
            int positionInBytes, int sizeInBytes, int format, boolean blocking);

    // Presumably re-arms a MODE_STATIC track's buffer for replay — confirm in JNI.
    private native final int native_reload_static();

    private native final int native_get_native_frame_count();

    private native final void native_setVolume(float leftVolume, float rightVolume);

    // Playback rate as a plain sample rate in Hz (legacy API) ...
    private native final int native_set_playback_rate(int sampleRateInHz);
    private native final int native_get_playback_rate();

    // ... and as the richer PlaybackParams (speed/pitch) API.
    private native final void native_set_playback_params(@NonNull PlaybackParams params);
    private native final @NonNull PlaybackParams native_get_playback_params();

    // Marker position and periodic-notification period, in frames.
    private native final int native_set_marker_pos(int marker);
    private native final int native_get_marker_pos();

    private native final int native_set_pos_update_period(int updatePeriod);
    private native final int native_get_pos_update_period();

    // Playback head position, in frames.
    private native final int native_set_position(int position);
    private native final int native_get_position();

    private native final int native_get_latency();

    // longArray must be a non-null array of length >= 2
    // [0] is assigned the frame position
    // [1] is assigned the time in CLOCK_MONOTONIC nanoseconds
    private native final int native_get_timestamp(long[] longArray);

    private native final int native_set_loop(int start, int end, int loopCount);

    // Static queries that do not require an instantiated track.
    static private native final int native_get_output_sample_rate(int streamType);
    static private native final int native_get_min_buff_size(
            int sampleRateInHz, int channelConfig, int audioFormat);

    // Auxiliary effect attachment and its send level (see attachAuxEffect()).
    private native final int native_attachAuxEffect(int effectId);
    private native final int native_setAuxEffectSendLevel(float level);

    // Explicit output-device routing and device-change callback control.
    private native final boolean native_setOutputDevice(int deviceId);
    private native final int native_getRoutedDeviceId();
    private native final void native_enableDeviceCallback();
    private native final void native_disableDeviceCallback();
    // NOTE(review): name suggests a "fixed channel count 8" feature query — verify in JNI.
    static private native int native_get_FCC_8();
2744
2745    //---------------------------------------------------------
2746    // Utility methods
2747    //------------------
2748
    // Logs a debug-level message to logcat under this class's TAG.
    private static void logd(String msg) {
        Log.d(TAG, msg);
    }
2752
    // Logs an error-level message to logcat under this class's TAG.
    private static void loge(String msg) {
        Log.e(TAG, msg);
    }
2756}
2757