1/*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media;
18
19import java.lang.ref.WeakReference;
20
21import android.os.Handler;
22import android.os.Looper;
23import android.os.Message;
24import android.util.Log;
25
26
27/**
28 * The AudioTrack class manages and plays a single audio resource for Java applications.
29 * It allows streaming PCM audio buffers to the audio hardware for playback. This is
30 * achieved by "pushing" the data to the AudioTrack object using one of the
 * {@link #write(byte[], int, int)} or {@link #write(short[], int, int)} methods.
32 *
33 * <p>An AudioTrack instance can operate under two modes: static or streaming.<br>
 * In streaming mode, the application writes a continuous stream of data to the AudioTrack, using
 * one of the {@code write()} methods. These are blocking and return when the data has been
 * transferred from the Java layer to the native layer and queued for playback. The streaming
 * mode is most useful when playing blocks of audio data that, for instance, are:
38 *
39 * <ul>
40 *   <li>too big to fit in memory because of the duration of the sound to play,</li>
41 *   <li>too big to fit in memory because of the characteristics of the audio data
 *         (high sampling rate, bits per sample...),</li>
43 *   <li>received or generated while previously queued audio is playing.</li>
44 * </ul>
45 *
 * The static mode should be chosen when dealing with short sounds that fit in memory and
 * that need to be played with the smallest latency possible. It is therefore preferred
 * for UI and game sounds that are played often and must be triggered with minimal overhead.
50 *
51 * <p>Upon creation, an AudioTrack object initializes its associated audio buffer.
 * The size of this buffer, specified during construction, determines how long an AudioTrack
53 * can play before running out of data.<br>
54 * For an AudioTrack using the static mode, this size is the maximum size of the sound that can
55 * be played from it.<br>
56 * For the streaming mode, data will be written to the hardware in chunks of
57 * sizes less than or equal to the total buffer size.
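 *
 * <p>Below is a minimal streaming-mode sketch (illustrative only: the buffer sizing policy,
 * the hypothetical {@code fillBuffer()} data source and the omitted error handling are
 * assumptions, not part of this API):
 * <pre>
 * int sampleRate = 44100;
 * int minBufSize = AudioTrack.getMinBufferSize(sampleRate,
 *         AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
 * AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
 *         AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
 *         minBufSize, AudioTrack.MODE_STREAM);
 * if (track.getState() == AudioTrack.STATE_INITIALIZED) {
 *     track.play();
 *     short[] pcm = new short[minBufSize / 2];
 *     int read;
 *     while ((read = fillBuffer(pcm)) &gt; 0) {  // fillBuffer() is a hypothetical PCM source
 *         track.write(pcm, 0, read);            // blocks until queued for playback
 *     }
 *     track.stop();
 *     track.release();
 * }
 * </pre>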
58 */
59public class AudioTrack
60{
61    //---------------------------------------------------------
62    // Constants
63    //--------------------
64    /** Minimum value for a channel volume */
65    private static final float VOLUME_MIN = 0.0f;
66    /** Maximum value for a channel volume */
67    private static final float VOLUME_MAX = 1.0f;
68
69    /** Minimum value for sample rate */
70    private static final int SAMPLE_RATE_HZ_MIN = 4000;
71    /** Maximum value for sample rate */
72    private static final int SAMPLE_RATE_HZ_MAX = 48000;
73
74    /** indicates AudioTrack state is stopped */
75    public static final int PLAYSTATE_STOPPED = 1;  // matches SL_PLAYSTATE_STOPPED
76    /** indicates AudioTrack state is paused */
77    public static final int PLAYSTATE_PAUSED  = 2;  // matches SL_PLAYSTATE_PAUSED
78    /** indicates AudioTrack state is playing */
79    public static final int PLAYSTATE_PLAYING = 3;  // matches SL_PLAYSTATE_PLAYING
80
81    // keep these values in sync with android_media_AudioTrack.cpp
82    /**
83     * Creation mode where audio data is transferred from Java to the native layer
84     * only once before the audio starts playing.
85     */
86    public static final int MODE_STATIC = 0;
87    /**
88     * Creation mode where audio data is streamed from Java to the native layer
89     * as the audio is playing.
90     */
91    public static final int MODE_STREAM = 1;
92
93    /**
94     * State of an AudioTrack that was not successfully initialized upon creation.
95     */
96    public static final int STATE_UNINITIALIZED = 0;
97    /**
98     * State of an AudioTrack that is ready to be used.
99     */
100    public static final int STATE_INITIALIZED   = 1;
101    /**
102     * State of a successfully initialized AudioTrack that uses static data,
103     * but that hasn't received that data yet.
104     */
105    public static final int STATE_NO_STATIC_DATA = 2;
106
107    // Error codes:
108    // to keep in sync with frameworks/base/core/jni/android_media_AudioTrack.cpp
109    /**
110     * Denotes a successful operation.
111     */
112    public  static final int SUCCESS                               = 0;
113    /**
114     * Denotes a generic operation failure.
115     */
116    public  static final int ERROR                                 = -1;
117    /**
118     * Denotes a failure due to the use of an invalid value.
119     */
120    public  static final int ERROR_BAD_VALUE                       = -2;
121    /**
122     * Denotes a failure due to the improper use of a method.
123     */
124    public  static final int ERROR_INVALID_OPERATION               = -3;
125
126    private static final int ERROR_NATIVESETUP_AUDIOSYSTEM         = -16;
127    private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK  = -17;
128    private static final int ERROR_NATIVESETUP_INVALIDFORMAT       = -18;
129    private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE   = -19;
130    private static final int ERROR_NATIVESETUP_NATIVEINITFAILED    = -20;
131
132    // Events:
133    // to keep in sync with frameworks/base/include/media/AudioTrack.h
134    /**
     * Event id for when the playback head has reached a previously set marker.
136     */
137    private static final int NATIVE_EVENT_MARKER  = 3;
138    /**
     * Event id for when a previously set update period has elapsed during playback.
140     */
141    private static final int NATIVE_EVENT_NEW_POS = 4;
142
143    private final static String TAG = "android.media.AudioTrack";
144
145
146    //--------------------------------------------------------------------------
147    // Member variables
148    //--------------------
149    /**
150     * Indicates the state of the AudioTrack instance.
151     */
152    private int mState = STATE_UNINITIALIZED;
153    /**
154     * Indicates the play state of the AudioTrack instance.
155     */
156    private int mPlayState = PLAYSTATE_STOPPED;
157    /**
     * Lock to ensure that mPlayState updates reflect the actual state of the object.
159     */
160    private final Object mPlayStateLock = new Object();
161    /**
162     * Size of the native audio buffer.
163     */
164    private int mNativeBufferSizeInBytes = 0;
165    /**
166     * Handler for marker events coming from the native code.
167     */
168    private NativeEventHandlerDelegate mEventHandlerDelegate;
169    /**
170     * Looper associated with the thread that creates the AudioTrack instance.
171     */
172    private final Looper mInitializationLooper;
173    /**
174     * The audio data sampling rate in Hz.
175     */
176    private int mSampleRate; // initialized by all constructors
177    /**
178     * The number of audio output channels (1 is mono, 2 is stereo).
179     */
180    private int mChannelCount = 1;
181    /**
182     * The audio channel mask.
183     */
184    private int mChannels = AudioFormat.CHANNEL_OUT_MONO;
185
186    /**
187     * The type of the audio stream to play. See
188     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
189     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
190     *   {@link AudioManager#STREAM_ALARM}, {@link AudioManager#STREAM_NOTIFICATION}, and
191     *   {@link AudioManager#STREAM_DTMF}.
192     */
193    private int mStreamType = AudioManager.STREAM_MUSIC;
194    /**
195     * The way audio is consumed by the hardware, streaming or static.
196     */
197    private int mDataLoadMode = MODE_STREAM;
198    /**
199     * The current audio channel configuration.
200     */
201    private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
202    /**
203     * The encoding of the audio samples.
204     * @see AudioFormat#ENCODING_PCM_8BIT
205     * @see AudioFormat#ENCODING_PCM_16BIT
206     */
207    private int mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
208    /**
209     * Audio session ID
210     */
211    private int mSessionId = 0;
212
213
214    //--------------------------------
215    // Used exclusively by native code
216    //--------------------
217    /**
218     * Accessed by native methods: provides access to C++ AudioTrack object.
219     */
220    @SuppressWarnings("unused")
221    private int mNativeTrackInJavaObj;
222    /**
223     * Accessed by native methods: provides access to the JNI data (i.e. resources used by
224     * the native AudioTrack object, but not stored in it).
225     */
226    @SuppressWarnings("unused")
227    private int mJniData;
228
229
230    //--------------------------------------------------------------------------
231    // Constructor, Finalize
232    //--------------------
233    /**
234     * Class constructor.
235     * @param streamType the type of the audio stream. See
236     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
237     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
238     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
239     * @param sampleRateInHz the sample rate expressed in Hertz.
240     * @param channelConfig describes the configuration of the audio channels.
241     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
242     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
243     * @param audioFormat the format in which the audio data is represented.
244     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
245     *   {@link AudioFormat#ENCODING_PCM_8BIT}
246     * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
247     *   from for playback. If using the AudioTrack in streaming mode, you can write data into
248     *   this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
249     *   this is the maximum size of the sound that will be played for this instance.
250     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
251     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
252     *   smaller than getMinBufferSize() will result in an initialization failure.
253     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
254     * @throws java.lang.IllegalArgumentException
255     */
256    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
257            int bufferSizeInBytes, int mode)
258    throws IllegalArgumentException {
259        this(streamType, sampleRateInHz, channelConfig, audioFormat,
260                bufferSizeInBytes, mode, 0);
261    }
262
263    /**
264     * Class constructor with audio session. Use this constructor when the AudioTrack must be
265     * attached to a particular audio session. The primary use of the audio session ID is to
266     * associate audio effects to a particular instance of AudioTrack: if an audio session ID
267     * is provided when creating an AudioEffect, this effect will be applied only to audio tracks
268     * and media players in the same session and not to the output mix.
     * When an AudioTrack is created without specifying a session, it will create its own session
     * which can be retrieved by calling the {@link #getAudioSessionId()} method.
     * If a non-zero session ID is provided, this AudioTrack will share effects attached to that
     * session with all other media players or audio tracks in the same session.
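     *
     * <p>A sketch of session sharing (illustrative only; it assumes
     * {@code android.media.audiofx.PresetReverb} and a {@code bufferSize} computed elsewhere):
     * <pre>
     * AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
     *         AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
     *         bufferSize, AudioTrack.MODE_STREAM);   // session generated automatically
     * int session = track.getAudioSessionId();
     * // an effect created on the same session applies to this track,
     * // not to the global output mix
     * PresetReverb reverb = new PresetReverb(0, session);  // 0 is the default priority
     * reverb.setPreset(PresetReverb.PRESET_LARGEHALL);
     * reverb.setEnabled(true);
     * </pre>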
275     * @param streamType the type of the audio stream. See
276     *   {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
277     *   {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
278     *   {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
279     * @param sampleRateInHz the sample rate expressed in Hertz.
280     * @param channelConfig describes the configuration of the audio channels.
281     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
282     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
283     * @param audioFormat the format in which the audio data is represented.
284     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
285     *   {@link AudioFormat#ENCODING_PCM_8BIT}
286     * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is read
287     *   from for playback. If using the AudioTrack in streaming mode, you can write data into
288     *   this buffer in smaller chunks than this size. If using the AudioTrack in static mode,
289     *   this is the maximum size of the sound that will be played for this instance.
290     *   See {@link #getMinBufferSize(int, int, int)} to determine the minimum required buffer size
291     *   for the successful creation of an AudioTrack instance in streaming mode. Using values
292     *   smaller than getMinBufferSize() will result in an initialization failure.
293     * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
     * @param sessionId ID of the audio session the AudioTrack must be attached to
295     * @throws java.lang.IllegalArgumentException
296     */
297    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
298            int bufferSizeInBytes, int mode, int sessionId)
299    throws IllegalArgumentException {
300        // mState already == STATE_UNINITIALIZED
301
302        // remember which looper is associated with the AudioTrack instantiation
303        Looper looper;
304        if ((looper = Looper.myLooper()) == null) {
305            looper = Looper.getMainLooper();
306        }
307        mInitializationLooper = looper;
308
309        audioParamCheck(streamType, sampleRateInHz, channelConfig, audioFormat, mode);
310
311        audioBuffSizeCheck(bufferSizeInBytes);
312
313        if (sessionId < 0) {
314            throw new IllegalArgumentException("Invalid audio session ID: "+sessionId);
315        }
316
317        int[] session = new int[1];
318        session[0] = sessionId;
319        // native initialization
320        int initResult = native_setup(new WeakReference<AudioTrack>(this),
321                mStreamType, mSampleRate, mChannels, mAudioFormat,
322                mNativeBufferSizeInBytes, mDataLoadMode, session);
323        if (initResult != SUCCESS) {
324            loge("Error code "+initResult+" when initializing AudioTrack.");
325            return; // with mState == STATE_UNINITIALIZED
326        }
327
328        mSessionId = session[0];
329
330        if (mDataLoadMode == MODE_STATIC) {
331            mState = STATE_NO_STATIC_DATA;
332        } else {
333            mState = STATE_INITIALIZED;
334        }
335    }
336
337    // mask of all the channels supported by this implementation
338    private static final int SUPPORTED_OUT_CHANNELS =
339            AudioFormat.CHANNEL_OUT_FRONT_LEFT |
340            AudioFormat.CHANNEL_OUT_FRONT_RIGHT |
341            AudioFormat.CHANNEL_OUT_FRONT_CENTER |
342            AudioFormat.CHANNEL_OUT_LOW_FREQUENCY |
343            AudioFormat.CHANNEL_OUT_BACK_LEFT |
344            AudioFormat.CHANNEL_OUT_BACK_RIGHT |
345            AudioFormat.CHANNEL_OUT_BACK_CENTER;
346
347    // Convenience method for the constructor's parameter checks.
    // This is where the constructor's IllegalArgumentExceptions are thrown.
349    // postconditions:
350    //    mStreamType is valid
351    //    mChannelCount is valid
352    //    mChannels is valid
353    //    mAudioFormat is valid
354    //    mSampleRate is valid
355    //    mDataLoadMode is valid
356    private void audioParamCheck(int streamType, int sampleRateInHz,
357                                 int channelConfig, int audioFormat, int mode) {
358
359        //--------------
360        // stream type
361        if( (streamType != AudioManager.STREAM_ALARM) && (streamType != AudioManager.STREAM_MUSIC)
362           && (streamType != AudioManager.STREAM_RING) && (streamType != AudioManager.STREAM_SYSTEM)
363           && (streamType != AudioManager.STREAM_VOICE_CALL)
364           && (streamType != AudioManager.STREAM_NOTIFICATION)
365           && (streamType != AudioManager.STREAM_BLUETOOTH_SCO)
366           && (streamType != AudioManager.STREAM_DTMF)) {
367            throw new IllegalArgumentException("Invalid stream type.");
368        } else {
369            mStreamType = streamType;
370        }
371
372        //--------------
373        // sample rate, note these values are subject to change
        if ((sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX)) {
            throw new IllegalArgumentException(sampleRateInHz
                    + " Hz is not a supported sample rate.");
377        } else {
378            mSampleRate = sampleRateInHz;
379        }
380
381        //--------------
382        // channel config
383        mChannelConfiguration = channelConfig;
384
385        switch (channelConfig) {
386        case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
387        case AudioFormat.CHANNEL_OUT_MONO:
388        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
389            mChannelCount = 1;
390            mChannels = AudioFormat.CHANNEL_OUT_MONO;
391            break;
392        case AudioFormat.CHANNEL_OUT_STEREO:
393        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
394            mChannelCount = 2;
395            mChannels = AudioFormat.CHANNEL_OUT_STEREO;
396            break;
397        default:
398            if (!isMultichannelConfigSupported(channelConfig)) {
                // channel configuration includes unsupported channels
400                mChannelCount = 0;
401                mChannels = AudioFormat.CHANNEL_INVALID;
402                mChannelConfiguration = AudioFormat.CHANNEL_INVALID;
403                throw new IllegalArgumentException("Unsupported channel configuration.");
404            } else {
405                mChannels = channelConfig;
406                mChannelCount = Integer.bitCount(channelConfig);
407            }
408        }
409
410        //--------------
411        // audio format
412        switch (audioFormat) {
413        case AudioFormat.ENCODING_DEFAULT:
414            mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
415            break;
416        case AudioFormat.ENCODING_PCM_16BIT:
417        case AudioFormat.ENCODING_PCM_8BIT:
418            mAudioFormat = audioFormat;
419            break;
420        default:
421            mAudioFormat = AudioFormat.ENCODING_INVALID;
422            throw new IllegalArgumentException("Unsupported sample encoding."
423                + " Should be ENCODING_PCM_8BIT or ENCODING_PCM_16BIT.");
424        }
425
426        //--------------
427        // audio load mode
428        if ( (mode != MODE_STREAM) && (mode != MODE_STATIC) ) {
429            throw new IllegalArgumentException("Invalid mode.");
430        } else {
431            mDataLoadMode = mode;
432        }
433    }
434
435    /**
     * Convenience method to check that the channel configuration (a.k.a. channel mask) is supported.
437     * @param channelConfig the mask to validate
438     * @return false if the AudioTrack can't be used with such a mask
439     */
440    private static boolean isMultichannelConfigSupported(int channelConfig) {
441        // check for unsupported channels
442        if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
443            loge("Channel configuration features unsupported channels");
444            return false;
445        }
446        // check for unsupported multichannel combinations:
447        // - FL/FR must be present
448        // - L/R channels must be paired (e.g. no single L channel)
449        final int frontPair =
450                AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
451        if ((channelConfig & frontPair) != frontPair) {
            loge("Front channels must be present in multichannel configurations");
            return false;
454        }
455        final int backPair =
456                AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT;
457        if ((channelConfig & backPair) != 0) {
458            if ((channelConfig & backPair) != backPair) {
459                loge("Rear channels can't be used independently");
460                return false;
461            }
462        }
463        return true;
464    }
465
466
    // Convenience method for the constructor's audio buffer size check.
468    // preconditions:
469    //    mChannelCount is valid
470    //    mAudioFormat is valid
471    // postcondition:
472    //    mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
473    private void audioBuffSizeCheck(int audioBufferSize) {
        // NB: this section is only valid for PCM data.
        //     It will need updating when compressed formats are supported.
476        int frameSizeInBytes = mChannelCount
477                * (mAudioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
478        if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
479            throw new IllegalArgumentException("Invalid audio buffer size.");
480        }
481
482        mNativeBufferSizeInBytes = audioBufferSize;
483    }
484
485
486    /**
487     * Releases the native AudioTrack resources.
488     */
489    public void release() {
490        // even though native_release() stops the native AudioTrack, we need to stop
491        // AudioTrack subclasses too.
492        try {
493            stop();
494        } catch(IllegalStateException ise) {
495            // don't raise an exception, we're releasing the resources.
496        }
497        native_release();
498        mState = STATE_UNINITIALIZED;
499    }
500
501    @Override
502    protected void finalize() {
503        native_finalize();
504    }
505
506    //--------------------------------------------------------------------------
507    // Getters
508    //--------------------
509    /**
     * Returns the minimum valid volume value. Volume values set below this one will
511     * be clamped at this value.
512     * @return the minimum volume expressed as a linear attenuation.
513     */
514    static public float getMinVolume() {
515        return VOLUME_MIN;
516    }
517
518    /**
519     * Returns the maximum valid volume value. Volume values set above this one will
520     * be clamped at this value.
521     * @return the maximum volume expressed as a linear attenuation.
522     */
523    static public float getMaxVolume() {
524        return VOLUME_MAX;
525    }
526
527    /**
528     * Returns the configured audio data sample rate in Hz
529     */
530    public int getSampleRate() {
531        return mSampleRate;
532    }
533
534    /**
535     * Returns the current playback rate in Hz.
536     */
537    public int getPlaybackRate() {
538        return native_get_playback_rate();
539    }
540
541    /**
542     * Returns the configured audio data format. See {@link AudioFormat#ENCODING_PCM_16BIT}
543     * and {@link AudioFormat#ENCODING_PCM_8BIT}.
544     */
545    public int getAudioFormat() {
546        return mAudioFormat;
547    }
548
549    /**
550     * Returns the type of audio stream this AudioTrack is configured for.
551     * Compare the result against {@link AudioManager#STREAM_VOICE_CALL},
552     * {@link AudioManager#STREAM_SYSTEM}, {@link AudioManager#STREAM_RING},
553     * {@link AudioManager#STREAM_MUSIC}, {@link AudioManager#STREAM_ALARM},
554     * {@link AudioManager#STREAM_NOTIFICATION}, or {@link AudioManager#STREAM_DTMF}.
555     */
556    public int getStreamType() {
557        return mStreamType;
558    }
559
560    /**
561     * Returns the configured channel configuration.
     *
563     * See {@link AudioFormat#CHANNEL_OUT_MONO}
564     * and {@link AudioFormat#CHANNEL_OUT_STEREO}.
565     */
566    public int getChannelConfiguration() {
567        return mChannelConfiguration;
568    }
569
570    /**
571     * Returns the configured number of channels.
572     */
573    public int getChannelCount() {
574        return mChannelCount;
575    }
576
577    /**
578     * Returns the state of the AudioTrack instance. This is useful after the
579     * AudioTrack instance has been created to check if it was initialized
580     * properly. This ensures that the appropriate hardware resources have been
581     * acquired.
582     * @see #STATE_INITIALIZED
583     * @see #STATE_NO_STATIC_DATA
584     * @see #STATE_UNINITIALIZED
585     */
586    public int getState() {
587        return mState;
588    }
589
590    /**
591     * Returns the playback state of the AudioTrack instance.
592     * @see #PLAYSTATE_STOPPED
593     * @see #PLAYSTATE_PAUSED
594     * @see #PLAYSTATE_PLAYING
595     */
596    public int getPlayState() {
597        synchronized (mPlayStateLock) {
598            return mPlayState;
599        }
600    }
601
602    /**
603     *  Returns the native frame count used by the hardware.
604     */
605    protected int getNativeFrameCount() {
606        return native_get_native_frame_count();
607    }
608
609    /**
     * Returns the marker position expressed in frames.
611     */
612    public int getNotificationMarkerPosition() {
613        return native_get_marker_pos();
614    }
615
616    /**
617     * Returns the notification update period expressed in frames.
618     */
619    public int getPositionNotificationPeriod() {
620        return native_get_pos_update_period();
621    }
622
623    /**
624     * Returns the playback head position expressed in frames
625     */
626    public int getPlaybackHeadPosition() {
627        return native_get_position();
628    }
629
630    /**
631     * Returns this track's estimated latency in milliseconds. This includes the latency due
632     * to AudioTrack buffer size, AudioMixer (if any) and audio hardware driver.
633     *
634     * DO NOT UNHIDE. The existing approach for doing A/V sync has too many problems. We need
635     * a better solution.
636     * @hide
637     */
638    public int getLatency() {
639        return native_get_latency();
640    }
641
642    /**
     * Returns the hardware output sample rate in Hz for the specified stream type.
644     */
645    static public int getNativeOutputSampleRate(int streamType) {
646        return native_get_output_sample_rate(streamType);
647    }
648
649    /**
     * Returns the minimum buffer size required for the successful creation of an AudioTrack
     * object in {@link #MODE_STREAM} mode. Note that this size doesn't guarantee smooth
     * playback under load, and higher values should be chosen according to the expected
     * frequency at which the buffer will be refilled with additional data to play.
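     *
     * <p>A sizing sketch (the doubling is an assumption chosen for headroom, not a requirement
     * of this API):
     * <pre>
     * int minSize = AudioTrack.getMinBufferSize(44100,
     *         AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
     * if (minSize &gt; 0) {
     *     int bufferSize = 2 * minSize;  // extra headroom for infrequent refills
     *     // ... pass bufferSize to the AudioTrack constructor
     * } else {
     *     // minSize is ERROR or ERROR_BAD_VALUE: report the failure
     * }
     * </pre>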
654     * @param sampleRateInHz the sample rate expressed in Hertz.
655     * @param channelConfig describes the configuration of the audio channels.
656     *   See {@link AudioFormat#CHANNEL_OUT_MONO} and
657     *   {@link AudioFormat#CHANNEL_OUT_STEREO}
658     * @param audioFormat the format in which the audio data is represented.
659     *   See {@link AudioFormat#ENCODING_PCM_16BIT} and
660     *   {@link AudioFormat#ENCODING_PCM_8BIT}
661     * @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed,
662     *   or {@link #ERROR} if the implementation was unable to query the hardware for its output
663     *     properties,
664     *   or the minimum buffer size expressed in bytes.
665     */
666    static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
667        int channelCount = 0;
668        switch(channelConfig) {
669        case AudioFormat.CHANNEL_OUT_MONO:
670        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
671            channelCount = 1;
672            break;
673        case AudioFormat.CHANNEL_OUT_STEREO:
674        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
675            channelCount = 2;
676            break;
677        default:
678            if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
                // channel configuration includes unsupported channels
680                loge("getMinBufferSize(): Invalid channel configuration.");
681                return ERROR_BAD_VALUE;
682            } else {
683                channelCount = Integer.bitCount(channelConfig);
684            }
685        }
686
687        if ((audioFormat != AudioFormat.ENCODING_PCM_16BIT)
688            && (audioFormat != AudioFormat.ENCODING_PCM_8BIT)) {
689            loge("getMinBufferSize(): Invalid audio format.");
690            return ERROR_BAD_VALUE;
691        }
692
693        // sample rate, note these values are subject to change
694        if ( (sampleRateInHz < SAMPLE_RATE_HZ_MIN) || (sampleRateInHz > SAMPLE_RATE_HZ_MAX) ) {
695            loge("getMinBufferSize(): " + sampleRateInHz + " Hz is not a supported sample rate.");
696            return ERROR_BAD_VALUE;
697        }
698
699        int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
700        if (size <= 0) {
701            loge("getMinBufferSize(): error querying hardware");
702            return ERROR;
703        }
704        else {
705            return size;
706        }
707    }
708
709    /**
710     * Returns the audio session ID.
711     *
712     * @return the ID of the audio session this AudioTrack belongs to.
713     */
714    public int getAudioSessionId() {
715        return mSessionId;
716    }
717
718    //--------------------------------------------------------------------------
719    // Initialization / configuration
720    //--------------------
721    /**
722     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
723     * for each periodic playback head position update.
724     * Notifications will be received in the same thread as the one in which the AudioTrack
725     * instance was created.
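     *
     * <p>A sketch of periodic notifications (the period and the callback bodies are
     * placeholders):
     * <pre>
     * track.setPositionNotificationPeriod(track.getSampleRate() / 2);  // about every half second
     * track.setPlaybackPositionUpdateListener(new OnPlaybackPositionUpdateListener() {
     *     public void onMarkerReached(AudioTrack t) {
     *         // a marker set with setNotificationMarkerPosition() was reached
     *     }
     *     public void onPeriodicNotification(AudioTrack t) {
     *         // called roughly once per notification period while playing
     *     }
     * });
     * </pre>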
726     * @param listener
727     */
728    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) {
729        setPlaybackPositionUpdateListener(listener, null);
730    }
731
732    /**
733     * Sets the listener the AudioTrack notifies when a previously set marker is reached or
734     * for each periodic playback head position update.
     * Use this method to receive AudioTrack events in the Handler associated with a thread
     * other than the one in which you created the AudioTrack instance.
737     * @param listener
738     * @param handler the Handler that will receive the event notification messages.
739     */
740    public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
741                                                    Handler handler) {
742        if (listener != null) {
743            mEventHandlerDelegate = new NativeEventHandlerDelegate(this, listener, handler);
744        } else {
745            mEventHandlerDelegate = null;
746        }
747    }
748
749
750
    /**
     * Sets the specified left/right output volume values on the AudioTrack. Values are clamped
     * to the [{@link #getMinVolume()}, {@link #getMaxVolume()}] interval if outside this range.
     * @param leftVolume output attenuation for the left channel. A value of 0.0f is silence,
     *      a value of 1.0f is no attenuation.
     * @param rightVolume output attenuation for the right channel, with the same value range.
757     * @return error code or success, see {@link #SUCCESS},
758     *    {@link #ERROR_INVALID_OPERATION}
759     */
760    public int setStereoVolume(float leftVolume, float rightVolume) {
761        if (mState == STATE_UNINITIALIZED) {
762            return ERROR_INVALID_OPERATION;
763        }
764
765        // clamp the volumes
766        if (leftVolume < getMinVolume()) {
767            leftVolume = getMinVolume();
768        }
769        if (leftVolume > getMaxVolume()) {
770            leftVolume = getMaxVolume();
771        }
772        if (rightVolume < getMinVolume()) {
773            rightVolume = getMinVolume();
774        }
775        if (rightVolume > getMaxVolume()) {
776            rightVolume = getMaxVolume();
777        }
778
779        native_setVolume(leftVolume, rightVolume);
780
781        return SUCCESS;
782    }
783
784
785    /**
     * Similar to {@link #setStereoVolume(float, float)}, except it sets the volume of all
     * channels to the same value.
787     * @hide
788     */
789    public int setVolume(float volume) {
790        return setStereoVolume(volume, volume);
791    }
792
793
794    /**
795     * Sets the playback sample rate for this track. This sets the sampling rate at which
796     * the audio data will be consumed and played back, not the original sampling rate of the
     * content. Setting it to half the sample rate of the content will cause the playback to
     * last twice as long, but will also shift the pitch down by an octave.
     * The valid sample rate range is from 1 Hz to twice the value returned by
800     * {@link #getNativeOutputSampleRate(int)}.
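     *
     * <p>A sketch (the rates are arbitrary examples and {@code track} is assumed to be an
     * initialized instance):
     * <pre>
     * // content recorded at 22050 Hz
     * track.setPlaybackRate(22050);       // original speed and pitch
     * track.setPlaybackRate(22050 / 2);   // half speed, one octave lower
     * // the upper bound of the valid range:
     * int maxRate = 2 * AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_MUSIC);
     * </pre>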
801     * @param sampleRateInHz the sample rate expressed in Hz
802     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
803     *    {@link #ERROR_INVALID_OPERATION}
804     */
805    public int setPlaybackRate(int sampleRateInHz) {
806        if (mState != STATE_INITIALIZED) {
807            return ERROR_INVALID_OPERATION;
808        }
809        if (sampleRateInHz <= 0) {
810            return ERROR_BAD_VALUE;
811        }
812        return native_set_playback_rate(sampleRateInHz);
813    }
814
815
816    /**
817     * Sets the position of the notification marker.
818     * @param markerInFrames marker in frames
819     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
820     *  {@link #ERROR_INVALID_OPERATION}
821     */
822    public int setNotificationMarkerPosition(int markerInFrames) {
823        if (mState == STATE_UNINITIALIZED) {
824            return ERROR_INVALID_OPERATION;
825        }
826        return native_set_marker_pos(markerInFrames);
827    }
828
829
830    /**
831     * Sets the period for the periodic notification event.
832     * @param periodInFrames update period expressed in frames
833     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
834     */
835    public int setPositionNotificationPeriod(int periodInFrames) {
836        if (mState == STATE_UNINITIALIZED) {
837            return ERROR_INVALID_OPERATION;
838        }
839        return native_set_pos_update_period(periodInFrames);
840    }
841
842
843    /**
844     * Sets the playback head position.
845     * The track must be stopped or paused for the position to be changed,
846     * and must use the {@link #MODE_STATIC} mode.
847     * @param positionInFrames playback head position expressed in frames
848     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
849     *    {@link #ERROR_INVALID_OPERATION}
850     */
851    public int setPlaybackHeadPosition(int positionInFrames) {
852        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
853                getPlayState() == PLAYSTATE_PLAYING) {
854            return ERROR_INVALID_OPERATION;
855        }
856        return native_set_position(positionInFrames);
857    }
858
859    /**
860     * Sets the loop points and the loop count. The loop can be infinite.
     * As with {@link #setPlaybackHeadPosition(int)}, the track must be stopped or paused
     * for the loop points to be changed, and must use the {@link #MODE_STATIC} mode.
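     *
     * <p>A static-mode looping sketch (the {@code soundBytes} array of 16-bit mono PCM is an
     * assumption):
     * <pre>
     * AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
     *         AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
     *         soundBytes.length, AudioTrack.MODE_STATIC);
     * track.write(soundBytes, 0, soundBytes.length);
     * int totalFrames = soundBytes.length / 2;   // 16-bit mono: 2 bytes per frame
     * track.setLoopPoints(0, totalFrames, -1);   // loop the whole sound indefinitely
     * track.play();
     * </pre>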
864     * @param startInFrames loop start marker expressed in frames
865     * @param endInFrames loop end marker expressed in frames
     * @param loopCount the number of times the loop is played.
867     *    A value of -1 means infinite looping.
868     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
869     *    {@link #ERROR_INVALID_OPERATION}
870     */
871    public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
872        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED ||
873                getPlayState() == PLAYSTATE_PLAYING) {
874            return ERROR_INVALID_OPERATION;
875        }
876        return native_set_loop(startInFrames, endInFrames, loopCount);
877    }
878
879    /**
880     * Sets the initialization state of the instance. To be used in an AudioTrack subclass
881     * constructor to set a subclass-specific post-initialization state.
882     * @param state the state of the AudioTrack instance
883     */
884    protected void setState(int state) {
885        mState = state;
886    }
887
888
889    //---------------------------------------------------------
890    // Transport control methods
891    //--------------------
892    /**
893     * Starts playing an AudioTrack.
894     *
895     * @throws IllegalStateException
896     */
897    public void play()
898    throws IllegalStateException {
899        if (mState != STATE_INITIALIZED) {
900            throw new IllegalStateException("play() called on uninitialized AudioTrack.");
901        }
902
903        synchronized(mPlayStateLock) {
904            native_start();
905            mPlayState = PLAYSTATE_PLAYING;
906        }
907    }
908
909    /**
910     * Stops playing the audio data.
911     * When used on an instance created in {@link #MODE_STREAM} mode, audio will stop playing
912     * after the last buffer that was written has been played. For an immediate stop, use
913     * {@link #pause()}, followed by {@link #flush()} to discard audio data that hasn't been played
914     * back yet.
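     *
     * <p>An immediate-stop sketch:
     * <pre>
     * track.pause();   // stop consuming data right away
     * track.flush();   // discard the data that was queued but not yet played
     * </pre>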
915     * @throws IllegalStateException
916     */
917    public void stop()
918    throws IllegalStateException {
919        if (mState != STATE_INITIALIZED) {
920            throw new IllegalStateException("stop() called on uninitialized AudioTrack.");
921        }
922
923        // stop playing
924        synchronized(mPlayStateLock) {
925            native_stop();
926            mPlayState = PLAYSTATE_STOPPED;
927        }
928    }
929
930    /**
931     * Pauses the playback of the audio data. Data that has not been played
932     * back will not be discarded. Subsequent calls to {@link #play} will play
933     * this data back. See {@link #flush()} to discard this data.
934     *
935     * @throws IllegalStateException
936     */
937    public void pause()
938    throws IllegalStateException {
939        if (mState != STATE_INITIALIZED) {
940            throw new IllegalStateException("pause() called on uninitialized AudioTrack.");
941        }
942        //logd("pause()");
943
944        // pause playback
945        synchronized(mPlayStateLock) {
946            native_pause();
947            mPlayState = PLAYSTATE_PAUSED;
948        }
949    }
950
951
952    //---------------------------------------------------------
953    // Audio data supply
954    //--------------------
955
956    /**
957     * Flushes the audio data currently queued for playback. Any data that has
958     * not been played back will be discarded.
959     */
960    public void flush() {
961        if (mState == STATE_INITIALIZED) {
962            // flush the data in native layer
963            native_flush();
964        }
965
966    }
967
968    /**
969     * Writes the audio data to the audio hardware for playback. Will block until
970     * all data has been written to the audio mixer.
971     * Note that the actual playback of this data might occur after this function
972     * returns. This function is thread safe with respect to {@link #stop} calls,
973     * in which case all of the specified data might not be written to the mixer.
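     *
     * <p>A streaming write loop (a sketch; {@code in} is an assumed {@code java.io.InputStream}
     * of raw 16-bit PCM and error handling is omitted):
     * <pre>
     * byte[] buffer = new byte[4096];  // arbitrary chunk size
     * int read;
     * track.play();
     * while ((read = in.read(buffer, 0, buffer.length)) &gt; 0) {
     *     track.write(buffer, 0, read);  // blocks until the chunk is queued
     * }
     * track.stop();
     * </pre>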
974     *
975     * @param audioData the array that holds the data to play.
976     * @param offsetInBytes the offset expressed in bytes in audioData where the data to play
977     *    starts.
978     * @param sizeInBytes the number of bytes to read in audioData after the offset.
979     * @return the number of bytes that were written or {@link #ERROR_INVALID_OPERATION}
980     *    if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
981     *    the parameters don't resolve to valid data and indexes.
982     */
983
984    public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) {
985
986        if (mState == STATE_UNINITIALIZED) {
987            return ERROR_INVALID_OPERATION;
988        }
989
990        if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
991                || (offsetInBytes + sizeInBytes < 0)    // detect integer overflow
992                || (offsetInBytes + sizeInBytes > audioData.length)) {
993            return ERROR_BAD_VALUE;
994        }
995
996        int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat);
997
998        if ((mDataLoadMode == MODE_STATIC)
999                && (mState == STATE_NO_STATIC_DATA)
1000                && (ret > 0)) {
1001            // benign race with respect to other APIs that read mState
1002            mState = STATE_INITIALIZED;
1003        }
1004
1005        return ret;
1006    }
1007
1008
1009    /**
1010     * Writes the audio data to the audio hardware for playback. Will block until
1011     * all data has been written to the audio mixer.
1012     * Note that the actual playback of this data might occur after this function
1013     * returns. This function is thread safe with respect to {@link #stop} calls,
1014     * in which case all of the specified data might not be written to the mixer.
1015     *
1016     * @param audioData the array that holds the data to play.
1017     * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
1018     *     starts.
     * @param sizeInShorts the number of shorts to read in audioData after the offset.
     * @return the number of shorts that were written or {@link #ERROR_INVALID_OPERATION}
     *    if the object wasn't properly initialized, or {@link #ERROR_BAD_VALUE} if
     *    the parameters don't resolve to valid data and indexes.
1023     */
1024
1025    public int write(short[] audioData, int offsetInShorts, int sizeInShorts) {
1026
1027        if (mState == STATE_UNINITIALIZED) {
1028            return ERROR_INVALID_OPERATION;
1029        }
1030
1031        if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
1032                || (offsetInShorts + sizeInShorts < 0)  // detect integer overflow
1033                || (offsetInShorts + sizeInShorts > audioData.length)) {
1034            return ERROR_BAD_VALUE;
1035        }
1036
1037        int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat);
1038
1039        if ((mDataLoadMode == MODE_STATIC)
1040                && (mState == STATE_NO_STATIC_DATA)
1041                && (ret > 0)) {
1042            // benign race with respect to other APIs that read mState
1043            mState = STATE_INITIALIZED;
1044        }
1045
1046        return ret;
1047    }
1048
1049
1050    /**
1051     * Notifies the native resource to reuse the audio data already loaded in the native
     * layer. This call is only valid with AudioTrack instances that don't use the streaming
     * mode, i.e. instances created with {@link #MODE_STATIC}.
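     *
     * <p>A sketch of replaying a static sound without writing the data again:
     * <pre>
     * track.stop();
     * if (track.reloadStaticData() == AudioTrack.SUCCESS) {
     *     track.play();   // plays the same data from the beginning
     * }
     * </pre>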
1054     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
1055     *  {@link #ERROR_INVALID_OPERATION}
1056     */
1057    public int reloadStaticData() {
1058        if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED) {
1059            return ERROR_INVALID_OPERATION;
1060        }
1061        return native_reload_static();
1062    }
1063
1064    //--------------------------------------------------------------------------
1065    // Audio effects management
1066    //--------------------
1067
1068    /**
     * Attaches an auxiliary effect to the audio track. A typical auxiliary
     * effect is a reverberation effect which can be applied to any sound source
     * that directs a certain amount of its energy to this effect. This amount
     * is defined by {@link #setAuxEffectSendLevel(float)}.
1074     * <p>After creating an auxiliary effect (e.g.
1075     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
1076     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling
1077     * this method to attach the audio track to the effect.
1078     * <p>To detach the effect from the audio track, call this method with a
1079     * null effect id.
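     *
     * <p>A sketch (assuming {@code android.media.audiofx.EnvironmentalReverb}; reverb parameters
     * and error handling are omitted):
     * <pre>
     * EnvironmentalReverb reverb = new EnvironmentalReverb(0, 0);  // default priority, output mix
     * reverb.setEnabled(true);
     * track.attachAuxEffect(reverb.getId());
     * track.setAuxEffectSendLevel(1.0f);
     * </pre>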
1080     *
1081     * @param effectId system wide unique id of the effect to attach
1082     * @return error code or success, see {@link #SUCCESS},
1083     *    {@link #ERROR_INVALID_OPERATION}, {@link #ERROR_BAD_VALUE}
1084     */
1085    public int attachAuxEffect(int effectId) {
1086        if (mState == STATE_UNINITIALIZED) {
1087            return ERROR_INVALID_OPERATION;
1088        }
1089        return native_attachAuxEffect(effectId);
1090    }
1091
1092    /**
     * Sets the send level of the audio track to the auxiliary effect attached with
     * {@link #attachAuxEffect(int)}. The level value range is 0.0 to 1.0.
     * <p>By default the send level is 0, so even if an effect is attached to the track
     * this method must be called for the effect to be applied.
1097     * <p>Note that the passed level value is a raw scalar. UI controls should be scaled
1098     * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
1099     * so an appropriate conversion from linear UI input x to level is:
1100     * x == 0 -&gt; level = 0
1101     * 0 &lt; x &lt;= R -&gt; level = 10^(72*(x-R)/20/R)
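     *
     * <p>A direct transcription of the conversion above (a sketch; {@code x} is the linear UI
     * control position and {@code R} its maximum value):
     * <pre>
     * float level = (x == 0) ? 0.0f : (float) Math.pow(10.0, 72.0 * (x - R) / 20.0 / R);
     * track.setAuxEffectSendLevel(level);
     * </pre>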
1102     *
1103     * @param level send level scalar
1104     * @return error code or success, see {@link #SUCCESS},
1105     *    {@link #ERROR_INVALID_OPERATION}
1106     */
1107    public int setAuxEffectSendLevel(float level) {
1108        if (mState == STATE_UNINITIALIZED) {
1109            return ERROR_INVALID_OPERATION;
1110        }
1111        // clamp the level
1112        if (level < getMinVolume()) {
1113            level = getMinVolume();
1114        }
1115        if (level > getMaxVolume()) {
1116            level = getMaxVolume();
1117        }
1118        native_setAuxEffectSendLevel(level);
1119        return SUCCESS;
1120    }
1121
1122    //---------------------------------------------------------
1123    // Interface definitions
1124    //--------------------
1125    /**
1126     * Interface definition for a callback to be invoked when the playback head position of
1127     * an AudioTrack has reached a notification marker or has increased by a certain period.
1128     */
1129    public interface OnPlaybackPositionUpdateListener  {
1130        /**
1131         * Called on the listener to notify it that the previously set marker has been reached
1132         * by the playback head.
1133         */
1134        void onMarkerReached(AudioTrack track);
1135
1136        /**
1137         * Called on the listener to periodically notify it that the playback head has reached
1138         * a multiple of the notification period.
1139         */
1140        void onPeriodicNotification(AudioTrack track);
1141    }
1142
1143
1144    //---------------------------------------------------------
1145    // Inner classes
1146    //--------------------
1147    /**
     * Helper class to handle the forwarding of native events to the appropriate listener,
     * which is potentially handled in a different thread.
1150     */
1151    private class NativeEventHandlerDelegate {
1152        private final Handler mHandler;
1153
1154        NativeEventHandlerDelegate(final AudioTrack track,
1155                                   final OnPlaybackPositionUpdateListener listener,
1156                                   Handler handler) {
1157            // find the looper for our new event handler
1158            Looper looper;
1159            if (handler != null) {
1160                looper = handler.getLooper();
1161            } else {
1162                // no given handler, use the looper the AudioTrack was created in
1163                looper = mInitializationLooper;
1164            }
1165
1166            // construct the event handler with this looper
1167            if (looper != null) {
1168                // implement the event handler delegate
1169                mHandler = new Handler(looper) {
1170                    @Override
1171                    public void handleMessage(Message msg) {
1172                        if (track == null) {
1173                            return;
1174                        }
1175                        switch(msg.what) {
1176                        case NATIVE_EVENT_MARKER:
1177                            if (listener != null) {
1178                                listener.onMarkerReached(track);
1179                            }
1180                            break;
1181                        case NATIVE_EVENT_NEW_POS:
1182                            if (listener != null) {
1183                                listener.onPeriodicNotification(track);
1184                            }
1185                            break;
1186                        default:
1187                            loge("Unknown native event type: " + msg.what);
1188                            break;
1189                        }
1190                    }
1191                };
1192            } else {
1193                mHandler = null;
1194            }
1195        }
1196
1197        Handler getHandler() {
1198            return mHandler;
1199        }
1200    }
1201
1202
1203    //---------------------------------------------------------
1204    // Java methods called from the native side
1205    //--------------------
1206    @SuppressWarnings("unused")
1207    private static void postEventFromNative(Object audiotrack_ref,
1208            int what, int arg1, int arg2, Object obj) {
1209        //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
1210        AudioTrack track = (AudioTrack)((WeakReference)audiotrack_ref).get();
1211        if (track == null) {
1212            return;
1213        }
1214
1215        NativeEventHandlerDelegate delegate = track.mEventHandlerDelegate;
1216        if (delegate != null) {
1217            Handler handler = delegate.getHandler();
1218            if (handler != null) {
1219                Message m = handler.obtainMessage(what, arg1, arg2, obj);
1220                handler.sendMessage(m);
1221            }
1222        }
1223
1224    }
1225
1226
1227    //---------------------------------------------------------
1228    // Native methods called from the Java side
1229    //--------------------
1230
1231    private native final int native_setup(Object audiotrack_this,
1232            int streamType, int sampleRate, int nbChannels, int audioFormat,
1233            int buffSizeInBytes, int mode, int[] sessionId);
1234
1235    private native final void native_finalize();
1236
1237    private native final void native_release();
1238
1239    private native final void native_start();
1240
1241    private native final void native_stop();
1242
1243    private native final void native_pause();
1244
1245    private native final void native_flush();
1246
1247    private native final int native_write_byte(byte[] audioData,
1248                                               int offsetInBytes, int sizeInBytes, int format);
1249
1250    private native final int native_write_short(short[] audioData,
1251                                                int offsetInShorts, int sizeInShorts, int format);
1252
1253    private native final int native_reload_static();
1254
1255    private native final int native_get_native_frame_count();
1256
1257    private native final void native_setVolume(float leftVolume, float rightVolume);
1258
1259    private native final int native_set_playback_rate(int sampleRateInHz);
1260    private native final int native_get_playback_rate();
1261
1262    private native final int native_set_marker_pos(int marker);
1263    private native final int native_get_marker_pos();
1264
1265    private native final int native_set_pos_update_period(int updatePeriod);
1266    private native final int native_get_pos_update_period();
1267
1268    private native final int native_set_position(int position);
1269    private native final int native_get_position();
1270
1271    private native final int native_get_latency();
1272
1273    private native final int native_set_loop(int start, int end, int loopCount);
1274
1275    static private native final int native_get_output_sample_rate(int streamType);
1276    static private native final int native_get_min_buff_size(
1277            int sampleRateInHz, int channelConfig, int audioFormat);
1278
1279    private native final int native_attachAuxEffect(int effectId);
1280    private native final void native_setAuxEffectSendLevel(float level);
1281
1282    //---------------------------------------------------------
1283    // Utility methods
1284    //------------------
1285
1286    private static void logd(String msg) {
1287        Log.d(TAG, msg);
1288    }
1289
1290    private static void loge(String msg) {
1291        Log.e(TAG, msg);
1292    }
1293
1294}
1295