MediaSync.java revision d80d6f6b056e23a8d017fe8f3900f6a88e27aa1b
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media;

import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.media.AudioTrack;
import android.media.PlaybackSettings;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.view.Surface;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
import java.util.LinkedList;
import java.util.List;

/**
 * MediaSync class can be used to synchronously play back audio and video streams.
 * It can also be used to play an audio-only or video-only stream.
 *
 * <p>MediaSync is generally used like this:
 * <pre>
 * MediaSync sync = new MediaSync();
 * sync.setSurface(surface);
 * Surface inputSurface = sync.createInputSurface();
 * ...
 * // MediaCodec videoDecoder = ...;
 * videoDecoder.configure(format, inputSurface, ...);
 * ...
 * sync.setAudioTrack(audioTrack);
 * sync.setCallback(new MediaSync.Callback() {
 *     {@literal @Override}
 *     public void onAudioBufferConsumed(MediaSync sync, ByteBuffer audioBuffer, int bufferIndex) {
 *         ...
 *     }
 * }, null);
 * // This needs to be done since sync is paused on creation.
 * sync.setPlaybackRate(1.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
 *
 * for (;;) {
 *   ...
 *   // send video frames to surface for rendering, e.g., call
 *   // videoDecoder.releaseOutputBuffer(videoOutputBufferIx, videoPresentationTimeNs);
 *   // More details are available below.
 *   ...
 *   sync.queueAudio(audioByteBuffer, bufferIndex, size, audioPresentationTimeUs); // non-blocking.
 *   // The audioByteBuffer and bufferIndex will be returned via callback.
 *   // More details are available below.
 *   ...
 *   ...
 * }
 * sync.setPlaybackRate(0.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
 * sync.release();
 * sync = null;
 *
 * // The following code snippet illustrates how video/audio raw frames are created by
 * // MediaCodec objects, how they are fed to MediaSync and how they are returned by MediaSync.
 * // This is the callback from MediaCodec.
 * onOutputBufferAvailable(MediaCodec codec, int bufferIndex, BufferInfo info) {
 *     // ...
 *     if (codec == videoDecoder) {
 *         // surface timestamp must contain media presentation time in nanoseconds.
 *         codec.releaseOutputBuffer(bufferIndex, 1000 * info.presentationTimeUs);
 *     } else {
 *         ByteBuffer audioByteBuffer = codec.getOutputBuffer(bufferIndex);
 *         sync.queueAudio(audioByteBuffer, bufferIndex, info.size, info.presentationTimeUs);
 *     }
 *     // ...
 * }
 *
 * // This is the callback from MediaSync.
 * onAudioBufferConsumed(MediaSync sync, ByteBuffer buffer, int bufferIndex) {
 *     // ...
 *     audioDecoder.releaseOutputBuffer(bufferIndex, false);
 *     // ...
 * }
 *
 * </pre>
 *
 * The client needs to configure the corresponding sink by setting the Surface and/or AudioTrack
 * based on the stream type it will play.
 * <p>
 * For video, the client needs to call {@link #createInputSurface} to obtain a surface on
 * which it will render video frames.
 * <p>
 * For audio, the client needs to set up the audio track correctly, e.g., using {@link
 * AudioTrack#MODE_STREAM}. The audio buffers are sent to MediaSync directly via {@link
 * #queueAudio}, and are returned to the client via {@link Callback#onAudioBufferConsumed}
 * asynchronously. The client should not modify an audio buffer until it is returned.
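 * <p>
 * For illustration only, an audio track in streaming mode might be created roughly as
 * follows; the sample rate, channel mask and encoding here are assumptions and must match
 * the decoded stream:
 * <pre>
 * int minBufferSize = AudioTrack.getMinBufferSize(44100,
 *         AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
 * AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 44100,
 *         AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
 *         minBufferSize, AudioTrack.MODE_STREAM);
 * sync.setAudioTrack(audioTrack);
 * </pre>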
 * <p>
 * The client can optionally pre-fill audio/video buffers by setting the playback rate to 0.0,
 * and then feeding audio/video buffers to the corresponding components. This can reduce possible
 * initial underrun.
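 * <p>
 * For illustration only, a possible pre-fill sequence looks like this:
 * <pre>
 * // MediaSync starts out paused (playback rate 0.0), so queued data is held back.
 * // ... queue initial audio buffers via queueAudio() and release initial video
 * //     frames to the input surface ...
 * sync.setPlaybackRate(1.0f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_RESAMPLE);
 * </pre>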
 * <p>
 */
public final class MediaSync {
    /**
     * MediaSync callback interface. Used to notify the user asynchronously
     * of various MediaSync events.
     */
    public static abstract class Callback {
        /**
         * Called when returning an audio buffer which has been consumed.
         *
         * @param sync The MediaSync object.
         * @param audioBuffer The returned audio buffer.
         * @param bufferIndex The index associated with the audio buffer
         */
        public abstract void onAudioBufferConsumed(
                @NonNull MediaSync sync, @NonNull ByteBuffer audioBuffer, int bufferIndex);
    }

    /** Audio track failed.
     * @see android.media.MediaSync.OnErrorListener
     */
    public static final int MEDIASYNC_ERROR_AUDIOTRACK_FAIL = 1;

    /** The surface failed to handle video buffers.
     * @see android.media.MediaSync.OnErrorListener
     */
    public static final int MEDIASYNC_ERROR_SURFACE_FAIL = 2;

    /**
     * Interface definition of a callback to be invoked when there
     * has been an error during an asynchronous operation (other errors
     * will throw exceptions at method call time).
     */
    public interface OnErrorListener {
        /**
         * Called to indicate an error.
         *
         * @param sync The MediaSync the error pertains to
         * @param what The type of error that has occurred:
         * <ul>
         * <li>{@link #MEDIASYNC_ERROR_AUDIOTRACK_FAIL}
         * <li>{@link #MEDIASYNC_ERROR_SURFACE_FAIL}
         * </ul>
         * @param extra an extra code, specific to the error. Typically
         * implementation dependent.
         */
        void onError(@NonNull MediaSync sync, int what, int extra);
    }

    private static final String TAG = "MediaSync";

    private static final int EVENT_CALLBACK = 1;
    private static final int EVENT_SET_CALLBACK = 2;

    private static final int CB_RETURN_AUDIO_BUFFER = 1;

    private static class AudioBuffer {
        public ByteBuffer mByteBuffer;
        public int mBufferIndex;
        public int mSizeInBytes;
        long mPresentationTimeUs;

        public AudioBuffer(@NonNull ByteBuffer byteBuffer, int bufferIndex,
                           int sizeInBytes, long presentationTimeUs) {
            mByteBuffer = byteBuffer;
            mBufferIndex = bufferIndex;
            mSizeInBytes = sizeInBytes;
            mPresentationTimeUs = presentationTimeUs;
        }
    }

    private final Object mCallbackLock = new Object();
    private Handler mCallbackHandler = null;
    private MediaSync.Callback mCallback = null;

    private final Object mOnErrorListenerLock = new Object();
    private Handler mOnErrorListenerHandler = null;
    private MediaSync.OnErrorListener mOnErrorListener = null;

    private Thread mAudioThread = null;
    // Created on mAudioThread when mAudioThread is started. When used on user thread, they should
    // be guarded by checking mAudioThread.
    private Handler mAudioHandler = null;
    private Looper mAudioLooper = null;

    private final Object mAudioLock = new Object();
    private AudioTrack mAudioTrack = null;
    private List<AudioBuffer> mAudioBuffers = new LinkedList<AudioBuffer>();
    // this is only used for paused/running decisions, so it is not affected by clock drift
    private float mPlaybackRate = 0.0f;

    private long mNativeContext;

    /**
     * Class constructor. On creation, MediaSync is paused, i.e., playback rate is 0.0f.
     */
    public MediaSync() {
        native_setup();
    }

    private native final void native_setup();

    @Override
    protected void finalize() {
        native_finalize();
    }

    private native final void native_finalize();

    /**
     * Make sure you call this when you're done to free up any opened
     * component instance instead of relying on the garbage collector
     * to do this for you at some point in the future.
     */
    public final void release() {
        returnAudioBuffers();
        if (mAudioThread != null) {
            if (mAudioLooper != null) {
                mAudioLooper.quit();
            }
        }
        setCallback(null, null);
        native_release();
    }

    private native final void native_release();

    /**
     * Sets an asynchronous callback for actionable MediaSync events.
     * <p>
     * This method can be called multiple times to update a previously set callback. If the
     * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
     * <p>
     * <b>Do not call this inside the callback.</b>
     *
     * @param cb The callback that will run. Use {@code null} to stop receiving callbacks.
     * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
     *     internal handler if it exists.
     */
    public void setCallback(@Nullable /* MediaSync. */ Callback cb, @Nullable Handler handler) {
        synchronized(mCallbackLock) {
            if (handler != null) {
                mCallbackHandler = handler;
            } else {
                Looper looper;
                if ((looper = Looper.myLooper()) == null) {
                    looper = Looper.getMainLooper();
                }
                if (looper == null) {
                    mCallbackHandler = null;
                } else {
                    mCallbackHandler = new Handler(looper);
                }
            }

            mCallback = cb;
        }
    }

    /**
     * Sets an asynchronous callback for error events.
     * <p>
     * This method can be called multiple times to update a previously set listener. If the
     * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
     * <p>
     * <b>Do not call this inside the callback.</b>
     *
     * @param listener The callback that will run. Use {@code null} to stop receiving callbacks.
     * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
     *     internal handler if it exists.
     */
    public void setOnErrorListener(@Nullable /* MediaSync. */ OnErrorListener listener,
            @Nullable Handler handler) {
        synchronized(mOnErrorListenerLock) {
            if (handler != null) {
                mOnErrorListenerHandler = handler;
            } else {
                Looper looper;
                if ((looper = Looper.myLooper()) == null) {
                    looper = Looper.getMainLooper();
                }
                if (looper == null) {
                    mOnErrorListenerHandler = null;
                } else {
                    mOnErrorListenerHandler = new Handler(looper);
                }
            }

            mOnErrorListener = listener;
        }
    }

    /**
     * Sets the output surface for MediaSync.
     * <p>
     * Currently, this is only supported in the Initialized state.
     *
     * @param surface Specify a surface on which to render the video data.
     * @throws IllegalArgumentException if the surface has been released, is invalid,
     *     or can not be connected.
     * @throws IllegalStateException if setting the surface is not supported, e.g.
     *     not in the Initialized state, or another surface has already been set.
     */
    public void setSurface(@Nullable Surface surface) {
        native_setSurface(surface);
    }

    private native final void native_setSurface(@Nullable Surface surface);

    /**
     * Sets the audio track for MediaSync.
     * <p>
     * Currently, this is only supported in the Initialized state.
     *
     * @param audioTrack Specify an AudioTrack through which to render the audio data.
     * @throws IllegalArgumentException if the audioTrack has been released, or is invalid.
     * @throws IllegalStateException if setting the audio track is not supported, e.g.
     *     not in the Initialized state, or another audio track has already been set.
     */
    public void setAudioTrack(@Nullable AudioTrack audioTrack) {
        native_setAudioTrack(audioTrack);
        mAudioTrack = audioTrack;
        if (audioTrack != null && mAudioThread == null) {
            createAudioThread();
        }
    }

    private native final void native_setAudioTrack(@Nullable AudioTrack audioTrack);

    /**
     * Requests a Surface to use as the input. This may only be called after
     * {@link #setSurface}.
     * <p>
     * The application is responsible for calling release() on the Surface when
     * done.
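     * <p>
     * For illustration only, the video path can be wired up roughly as follows (the
     * {@code videoDecoder} and {@code format} variables are assumed to exist):
     * <pre>
     * sync.setSurface(outputSurface);
     * Surface inputSurface = sync.createInputSurface();
     * videoDecoder.configure(format, inputSurface, null, 0);
     * </pre>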
     * @throws IllegalStateException if not set, or another input surface has
     *     already been created.
     */
    @NonNull
    public native final Surface createInputSurface();

    /**
     * Resample audio data when changing playback speed.
     * <p>
     * Resample the waveform based on the requested playback rate to get
     * a new waveform, and play back the new waveform at the original sampling
     * frequency.
     * <p><ul>
     * <li>When rate is larger than 1.0, pitch becomes higher.
     * <li>When rate is smaller than 1.0, pitch becomes lower.
     * </ul>
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;

    /**
     * Time stretch audio when changing playback speed.
     * <p>
     * Time stretching changes the duration of the audio samples without
     * affecting their pitch. This is only supported for a limited range
     * of playback speeds, e.g. from 1/2x to 2x. If the rate is adjusted
     * beyond this limit, the rate change will fail.
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;

    /**
     * Time stretch audio when changing playback speed, and may mute if
     * stretching is no longer supported.
     * <p>
     * Time stretching changes the duration of the audio samples without
     * affecting their pitch. This is only supported for a limited range
     * of playback speeds, e.g. from 1/2x to 2x. When it is no longer
     * supported, the audio may be muted. Using this mode will not fail
     * for non-negative playback rates.
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;

    /** @hide */
    @IntDef(
        value = {
            PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
            PLAYBACK_RATE_AUDIO_MODE_STRETCH,
            PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
        })
    @Retention(RetentionPolicy.SOURCE)
    public @interface PlaybackRateAudioMode {}

    /**
     * Sets playback rate and audio mode.
     *
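     * <p>
     * For example (illustrative only), to play at half speed while preserving the original
     * pitch:
     * <pre>
     * sync.setPlaybackRate(0.5f, MediaSync.PLAYBACK_RATE_AUDIO_MODE_STRETCH);
     * </pre>
     *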
     * @param rate the ratio between desired playback rate and the normal one. 1.0 means normal
     *     playback speed. 0.0 means pause. A value larger than 1.0 means faster playback,
     *     while a value between 0.0 and 1.0 means slower playback. <b>Note:</b> the normal rate
     *     does not change as a result of this call. To restore the original rate at any time,
     *     use 1.0.
     * @param audioMode audio playback mode. Must be one of the supported
     *     audio modes.
     *
     * @throws IllegalStateException if the internal sync engine or the audio track has not
     *     been initialized.
     * @throws IllegalArgumentException if audioMode is not supported.
     */
    public void setPlaybackRate(float rate, @PlaybackRateAudioMode int audioMode) {
        PlaybackSettings rateSettings = new PlaybackSettings();
        rateSettings.allowDefaults();
        switch (audioMode) {
            case PLAYBACK_RATE_AUDIO_MODE_DEFAULT:
                rateSettings.setSpeed(rate).setPitch(1.0f);
                break;
            case PLAYBACK_RATE_AUDIO_MODE_STRETCH:
                rateSettings.setSpeed(rate).setPitch(1.0f)
                        .setAudioFallbackMode(PlaybackSettings.AUDIO_FALLBACK_MODE_FAIL);
                break;
            case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE:
                rateSettings.setSpeed(rate).setPitch(rate);
                break;
            default: {
                final String msg = "Audio playback mode " + audioMode + " is not supported";
                throw new IllegalArgumentException(msg);
            }
        }
        setPlaybackSettings(rateSettings);
    }

    /**
     * Sets playback rate using {@link PlaybackSettings}.
     * <p>
     * When using MediaSync with {@link AudioTrack}, set playback settings using this
     * call instead of calling it directly on the track, so that the sync is aware of
     * the settings change.
     * <p>
     * This call also works if there is no audio track.
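     * <p>
     * A minimal sketch (illustrative only) that resumes normal-speed playback:
     * <pre>
     * PlaybackSettings settings = new PlaybackSettings();
     * settings.allowDefaults();
     * settings.setSpeed(1.0f);
     * sync.setPlaybackSettings(settings);
     * </pre>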
     *
     * @param settings the playback settings to use. {@link PlaybackSettings#getSpeed
     *     Speed} is the ratio between desired playback rate and the normal one. 1.0 means
     *     normal playback speed. 0.0 means pause. A value larger than 1.0 means faster playback,
     *     while a value between 0.0 and 1.0 means slower playback. <b>Note:</b> the normal rate
     *     does not change as a result of this call. To restore the original rate at any time,
     *     use a speed of 1.0.
     *
     * @throws IllegalStateException if the internal sync engine or the audio track has not
     *     been initialized.
     * @throws IllegalArgumentException if the settings are not supported.
     */
    public void setPlaybackSettings(@NonNull PlaybackSettings settings) {
        synchronized(mAudioLock) {
            mPlaybackRate = native_setPlaybackSettings(settings);
        }
        if (mPlaybackRate != 0.0 && mAudioThread != null) {
            postRenderAudio(0);
        }
    }

    /**
     * Gets the playback rate using {@link PlaybackSettings}.
     *
     * @return the playback rate being used.
     *
     * @throws IllegalStateException if the internal sync engine or the audio track has not
     *     been initialized.
     */
    @NonNull
    public native PlaybackSettings getPlaybackSettings();

    private native float native_setPlaybackSettings(@NonNull PlaybackSettings settings);

    /**
     * Sets A/V sync mode.
     *
     * @param settings the A/V sync settings to apply
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     * @throws IllegalArgumentException if settings are not supported.
     */
    public void setSyncSettings(@NonNull SyncSettings settings) {
        synchronized(mAudioLock) {
            mPlaybackRate = native_setSyncSettings(settings);
        }
        if (mPlaybackRate != 0.0 && mAudioThread != null) {
            postRenderAudio(0);
        }
    }

    private native float native_setSyncSettings(@NonNull SyncSettings settings);

    /**
     * Gets the A/V sync mode.
     *
     * @return the A/V sync settings
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    @NonNull
    public native SyncSettings getSyncSettings();

    /**
     * Flushes all buffers from the sync object.
     * <p>
     * No callbacks are received for the flushed buffers.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    public void flush() {
        synchronized(mAudioLock) {
            mAudioBuffers.clear();
            if (mCallbackHandler != null) {
                mCallbackHandler.removeCallbacksAndMessages(null);
            }
        }
        // TODO implement this for surface buffers.
    }

    /**
     * Get current playback position.
     * <p>
     * The MediaTimestamp represents how the media time correlates to the system time in
     * a linear fashion. It contains the media time and system timestamp of an anchor frame
     * ({@link MediaTimestamp#mediaTimeUs} and {@link MediaTimestamp#nanoTime})
     * and the speed of the media clock ({@link MediaTimestamp#clockRate}).
     * <p>
     * During regular playback, the media time moves fairly constantly (though the
     * anchor frame may be rebased to a current system time, the linear correlation stays
     * steady). Therefore, this method does not need to be called often.
     * <p>
     * To help users get the current playback position, this method always returns the
     * timestamp of the just-rendered frame, i.e., {@link System#nanoTime} and its
     * corresponding media time. They can be used as the current playback position.
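     * <p>
     * For example (illustrative only, using the fields referenced above), the current media
     * time can be estimated as:
     * <pre>
     * MediaTimestamp ts = sync.getTimestamp();
     * if (ts != null) {
     *     long nowMediaTimeUs = ts.mediaTimeUs
     *             + (long) ((System.nanoTime() - ts.nanoTime) * ts.clockRate / 1000);
     * }
     * </pre>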
     *
     * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
     *         is available, e.g. because the media sync has not been initialized.
     */
    @Nullable
    public MediaTimestamp getTimestamp() {
        try {
            // TODO: create the timestamp in native
            MediaTimestamp timestamp = new MediaTimestamp();
            if (native_getTimestamp(timestamp)) {
                return timestamp;
            } else {
                return null;
            }
        } catch (IllegalStateException e) {
            return null;
        }
    }

    private native final boolean native_getTimestamp(@NonNull MediaTimestamp timestamp);

    /**
     * Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode).
     * @param audioData the buffer that holds the data to play. This buffer will be returned
     *     to the client via the registered callback.
     * @param bufferIndex the buffer index used to identify audioData. It will be returned to
     *     the client along with audioData. This helps applications keep track of audioData.
     * @param sizeInBytes number of bytes to queue.
     * @param presentationTimeUs the presentation timestamp in microseconds for the first frame
     *     in the buffer.
     * @throws IllegalStateException if the audio track is not set or the internal configuration
     *     has not been done correctly.
     */
    public void queueAudio(
            @NonNull ByteBuffer audioData, int bufferIndex, int sizeInBytes,
            long presentationTimeUs) {
        if (mAudioTrack == null || mAudioThread == null) {
            throw new IllegalStateException(
                    "AudioTrack is NOT set or audio thread is not created");
        }

        synchronized(mAudioLock) {
            mAudioBuffers.add(new AudioBuffer(
                    audioData, bufferIndex, sizeInBytes, presentationTimeUs));
        }

        if (mPlaybackRate != 0.0) {
            postRenderAudio(0);
        }
    }

    // When called on user thread, make sure to check mAudioThread != null.
    private void postRenderAudio(long delayMillis) {
        mAudioHandler.postDelayed(new Runnable() {
            public void run() {
                synchronized(mAudioLock) {
                    if (mPlaybackRate == 0.0) {
                        return;
                    }

                    if (mAudioBuffers.isEmpty()) {
                        return;
                    }

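                    // Write the head of the queue to the AudioTrack without blocking;
                    // a partial write leaves the remainder queued for a later pass.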
                    AudioBuffer audioBuffer = mAudioBuffers.get(0);
                    int sizeWritten = mAudioTrack.write(
                            audioBuffer.mByteBuffer,
                            audioBuffer.mSizeInBytes,
                            AudioTrack.WRITE_NON_BLOCKING);
                    if (sizeWritten > 0) {
                        if (audioBuffer.mPresentationTimeUs != -1) {
                            native_updateQueuedAudioData(
                                    audioBuffer.mSizeInBytes, audioBuffer.mPresentationTimeUs);
                            audioBuffer.mPresentationTimeUs = -1;
                        }

                        if (sizeWritten == audioBuffer.mSizeInBytes) {
                            postReturnByteBuffer(audioBuffer);
                            mAudioBuffers.remove(0);
                            if (!mAudioBuffers.isEmpty()) {
                                postRenderAudio(0);
                            }
                            return;
                        }

                        audioBuffer.mSizeInBytes -= sizeWritten;
                    }
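                    // Not fully written yet: poll again after roughly half of the pending
                    // audio has played out.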
                    long pendingTimeMs = TimeUnit.MICROSECONDS.toMillis(
                            native_getPlayTimeForPendingAudioFrames());
                    postRenderAudio(pendingTimeMs / 2);
                }
            }
        }, delayMillis);
    }

    private native final void native_updateQueuedAudioData(
            int sizeInBytes, long presentationTimeUs);

    private native final long native_getPlayTimeForPendingAudioFrames();

    private final void postReturnByteBuffer(@NonNull final AudioBuffer audioBuffer) {
        synchronized(mCallbackLock) {
            if (mCallbackHandler != null) {
                final MediaSync sync = this;
                mCallbackHandler.post(new Runnable() {
                    public void run() {
                        synchronized(mCallbackLock) {
                            if (mCallbackHandler == null
                                    || mCallbackHandler.getLooper().getThread()
                                            != Thread.currentThread()) {
                                // callback handler has been changed.
                                return;
                            }
                            if (mCallback != null) {
                                mCallback.onAudioBufferConsumed(sync, audioBuffer.mByteBuffer,
                                        audioBuffer.mBufferIndex);
                            }
                        }
                    }
                });
            }
        }
    }

    private final void returnAudioBuffers() {
        synchronized(mAudioLock) {
            for (AudioBuffer audioBuffer: mAudioBuffers) {
                postReturnByteBuffer(audioBuffer);
            }
            mAudioBuffers.clear();
        }
    }

    private void createAudioThread() {
        mAudioThread = new Thread() {
            @Override
            public void run() {
                Looper.prepare();
                synchronized(mAudioLock) {
                    mAudioLooper = Looper.myLooper();
                    mAudioHandler = new Handler();
                    mAudioLock.notify();
                }
                Looper.loop();
            }
        };
        mAudioThread.start();

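        // Block until the audio thread has created its Looper and Handler.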
        synchronized(mAudioLock) {
            try {
                mAudioLock.wait();
            } catch(InterruptedException e) {
            }
        }
    }

    static {
        System.loadLibrary("media_jni");
        native_init();
    }

    private static native final void native_init();
}