MediaPlayer.java revision e5fb79bb2def5a17d158193c2e668224dc5ad5c1
1/*
2 * Copyright (C) 2006 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.media;
18
19import android.annotation.IntDef;
20import android.annotation.NonNull;
21import android.annotation.Nullable;
22import android.app.ActivityThread;
23import android.content.ContentProvider;
24import android.content.ContentResolver;
25import android.content.Context;
26import android.content.res.AssetFileDescriptor;
27import android.net.Uri;
28import android.os.Handler;
29import android.os.HandlerThread;
30import android.os.IBinder;
31import android.os.Looper;
32import android.os.Message;
33import android.os.Parcel;
34import android.os.Parcelable;
35import android.os.Process;
36import android.os.PowerManager;
37import android.os.SystemProperties;
38import android.provider.Settings;
39import android.system.ErrnoException;
40import android.system.OsConstants;
41import android.util.Log;
42import android.util.Pair;
43import android.view.Surface;
44import android.view.SurfaceHolder;
45import android.widget.VideoView;
46import android.graphics.SurfaceTexture;
47import android.media.AudioManager;
48import android.media.MediaFormat;
49import android.media.MediaTimeProvider;
50import android.media.PlaybackParams;
51import android.media.SubtitleController;
52import android.media.SubtitleController.Anchor;
53import android.media.SubtitleData;
54import android.media.SubtitleTrack.RenderingWidget;
55import android.media.SyncParams;
56
57import com.android.internal.util.Preconditions;
58
59import libcore.io.IoBridge;
60import libcore.io.Libcore;
61
62import java.io.ByteArrayOutputStream;
63import java.io.File;
64import java.io.FileDescriptor;
65import java.io.FileInputStream;
66import java.io.IOException;
67import java.io.InputStream;
68import java.lang.Runnable;
69import java.lang.annotation.Retention;
70import java.lang.annotation.RetentionPolicy;
71import java.net.InetSocketAddress;
72import java.util.BitSet;
73import java.util.Map;
74import java.util.Scanner;
75import java.util.Set;
76import java.util.Vector;
77import java.lang.ref.WeakReference;
78
79/**
80 * MediaPlayer class can be used to control playback
81 * of audio/video files and streams. An example on how to use the methods in
82 * this class can be found in {@link android.widget.VideoView}.
83 *
84 * <p>Topics covered here are:
85 * <ol>
86 * <li><a href="#StateDiagram">State Diagram</a>
87 * <li><a href="#Valid_and_Invalid_States">Valid and Invalid States</a>
88 * <li><a href="#Permissions">Permissions</a>
89 * <li><a href="#Callbacks">Register informational and error callbacks</a>
90 * </ol>
91 *
92 * <div class="special reference">
93 * <h3>Developer Guides</h3>
94 * <p>For more information about how to use MediaPlayer, read the
95 * <a href="{@docRoot}guide/topics/media/mediaplayer.html">Media Playback</a> developer guide.</p>
96 * </div>
97 *
98 * <a name="StateDiagram"></a>
99 * <h3>State Diagram</h3>
100 *
101 * <p>Playback control of audio/video files and streams is managed as a state
102 * machine. The following diagram shows the life cycle and the states of a
103 * MediaPlayer object driven by the supported playback control operations.
104 * The ovals represent the states a MediaPlayer object may reside
105 * in. The arcs represent the playback control operations that drive the object
106 * state transition. There are two types of arcs. The arcs with a single arrow
107 * head represent synchronous method calls, while those with
108 * a double arrow head represent asynchronous method calls.</p>
109 *
110 * <p><img src="../../../images/mediaplayer_state_diagram.gif"
111 *         alt="MediaPlayer State diagram"
112 *         border="0" /></p>
113 *
114 * <p>From this state diagram, one can see that a MediaPlayer object has the
115 *    following states:</p>
116 * <ul>
117 *     <li>When a MediaPlayer object is just created using <code>new</code> or
118 *         after {@link #reset()} is called, it is in the <em>Idle</em> state; and after
119 *         {@link #release()} is called, it is in the <em>End</em> state. Between these
120 *         two states is the life cycle of the MediaPlayer object.
121 *         <ul>
122 *         <li>There is a subtle but important difference between a newly constructed
123 *         MediaPlayer object and the MediaPlayer object after {@link #reset()}
124 *         is called. It is a programming error to invoke methods such
125 *         as {@link #getCurrentPosition()},
126 *         {@link #getDuration()}, {@link #getVideoHeight()},
127 *         {@link #getVideoWidth()}, {@link #setAudioAttributes(AudioAttributes)},
128 *         {@link #setLooping(boolean)},
129 *         {@link #setVolume(float, float)}, {@link #pause()}, {@link #start()},
130 *         {@link #stop()}, {@link #seekTo(int, int)}, {@link #prepare()} or
131 *         {@link #prepareAsync()} in the <em>Idle</em> state for both cases. If any of these
132 *         methods is called right after a MediaPlayer object is constructed,
133 *         the user supplied callback method OnErrorListener.onError() won't be
134 *         called by the internal player engine and the object state remains
135 *         unchanged; but if these methods are called right after {@link #reset()},
136 *         the user supplied callback method OnErrorListener.onError() will be
137 *         invoked by the internal player engine and the object will be
138 *         transfered to the <em>Error</em> state. </li>
139 *         <li>It is also recommended that once
140 *         a MediaPlayer object is no longer being used, call {@link #release()} immediately
141 *         so that resources used by the internal player engine associated with the
142 *         MediaPlayer object can be released immediately. Resource may include
143 *         singleton resources such as hardware acceleration components and
144 *         failure to call {@link #release()} may cause subsequent instances of
145 *         MediaPlayer objects to fallback to software implementations or fail
146 *         altogether. Once the MediaPlayer
147 *         object is in the <em>End</em> state, it can no longer be used and
148 *         there is no way to bring it back to any other state. </li>
149 *         <li>Furthermore,
150 *         the MediaPlayer objects created using <code>new</code> is in the
151 *         <em>Idle</em> state, while those created with one
152 *         of the overloaded convenient <code>create</code> methods are <em>NOT</em>
153 *         in the <em>Idle</em> state. In fact, the objects are in the <em>Prepared</em>
154 *         state if the creation using <code>create</code> method is successful.
155 *         </li>
156 *         </ul>
157 *         </li>
158 *     <li>In general, some playback control operation may fail due to various
159 *         reasons, such as unsupported audio/video format, poorly interleaved
160 *         audio/video, resolution too high, streaming timeout, and the like.
161 *         Thus, error reporting and recovery is an important concern under
162 *         these circumstances. Sometimes, due to programming errors, invoking a playback
163 *         control operation in an invalid state may also occur. Under all these
164 *         error conditions, the internal player engine invokes a user supplied
165 *         OnErrorListener.onError() method if an OnErrorListener has been
166 *         registered beforehand via
167 *         {@link #setOnErrorListener(android.media.MediaPlayer.OnErrorListener)}.
168 *         <ul>
169 *         <li>It is important to note that once an error occurs, the
170 *         MediaPlayer object enters the <em>Error</em> state (except as noted
171 *         above), even if an error listener has not been registered by the application.</li>
172 *         <li>In order to reuse a MediaPlayer object that is in the <em>
173 *         Error</em> state and recover from the error,
174 *         {@link #reset()} can be called to restore the object to its <em>Idle</em>
175 *         state.</li>
176 *         <li>It is good programming practice to have your application
177 *         register a OnErrorListener to look out for error notifications from
178 *         the internal player engine.</li>
179 *         <li>IllegalStateException is
180 *         thrown to prevent programming errors such as calling {@link #prepare()},
181 *         {@link #prepareAsync()}, or one of the overloaded <code>setDataSource
182 *         </code> methods in an invalid state. </li>
183 *         </ul>
184 *         </li>
185 *     <li>Calling
186 *         {@link #setDataSource(FileDescriptor)}, or
187 *         {@link #setDataSource(String)}, or
188 *         {@link #setDataSource(Context, Uri)}, or
189 *         {@link #setDataSource(FileDescriptor, long, long)}, or
190 *         {@link #setDataSource(MediaDataSource)} transfers a
191 *         MediaPlayer object in the <em>Idle</em> state to the
192 *         <em>Initialized</em> state.
193 *         <ul>
194 *         <li>An IllegalStateException is thrown if
195 *         setDataSource() is called in any other state.</li>
196 *         <li>It is good programming
197 *         practice to always look out for <code>IllegalArgumentException</code>
198 *         and <code>IOException</code> that may be thrown from the overloaded
199 *         <code>setDataSource</code> methods.</li>
200 *         </ul>
201 *         </li>
202 *     <li>A MediaPlayer object must first enter the <em>Prepared</em> state
203 *         before playback can be started.
204 *         <ul>
205 *         <li>There are two ways (synchronous vs.
206 *         asynchronous) that the <em>Prepared</em> state can be reached:
207 *         either a call to {@link #prepare()} (synchronous) which
208 *         transfers the object to the <em>Prepared</em> state once the method call
209 *         returns, or a call to {@link #prepareAsync()} (asynchronous) which
210 *         first transfers the object to the <em>Preparing</em> state after the
211 *         call returns (which occurs almost right way) while the internal
212 *         player engine continues working on the rest of preparation work
213 *         until the preparation work completes. When the preparation completes or when {@link #prepare()} call returns,
214 *         the internal player engine then calls a user supplied callback method,
215 *         onPrepared() of the OnPreparedListener interface, if an
216 *         OnPreparedListener is registered beforehand via {@link
217 *         #setOnPreparedListener(android.media.MediaPlayer.OnPreparedListener)}.</li>
218 *         <li>It is important to note that
219 *         the <em>Preparing</em> state is a transient state, and the behavior
220 *         of calling any method with side effect while a MediaPlayer object is
221 *         in the <em>Preparing</em> state is undefined.</li>
222 *         <li>An IllegalStateException is
223 *         thrown if {@link #prepare()} or {@link #prepareAsync()} is called in
224 *         any other state.</li>
225 *         <li>While in the <em>Prepared</em> state, properties
226 *         such as audio/sound volume, screenOnWhilePlaying, looping can be
227 *         adjusted by invoking the corresponding set methods.</li>
228 *         </ul>
229 *         </li>
230 *     <li>To start the playback, {@link #start()} must be called. After
231 *         {@link #start()} returns successfully, the MediaPlayer object is in the
232 *         <em>Started</em> state. {@link #isPlaying()} can be called to test
233 *         whether the MediaPlayer object is in the <em>Started</em> state.
234 *         <ul>
235 *         <li>While in the <em>Started</em> state, the internal player engine calls
236 *         a user supplied OnBufferingUpdateListener.onBufferingUpdate() callback
237 *         method if a OnBufferingUpdateListener has been registered beforehand
238 *         via {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}.
239 *         This callback allows applications to keep track of the buffering status
240 *         while streaming audio/video.</li>
241 *         <li>Calling {@link #start()} has not effect
242 *         on a MediaPlayer object that is already in the <em>Started</em> state.</li>
243 *         </ul>
244 *         </li>
245 *     <li>Playback can be paused and stopped, and the current playback position
246 *         can be adjusted. Playback can be paused via {@link #pause()}. When the call to
247 *         {@link #pause()} returns, the MediaPlayer object enters the
248 *         <em>Paused</em> state. Note that the transition from the <em>Started</em>
249 *         state to the <em>Paused</em> state and vice versa happens
250 *         asynchronously in the player engine. It may take some time before
251 *         the state is updated in calls to {@link #isPlaying()}, and it can be
252 *         a number of seconds in the case of streamed content.
253 *         <ul>
254 *         <li>Calling {@link #start()} to resume playback for a paused
255 *         MediaPlayer object, and the resumed playback
256 *         position is the same as where it was paused. When the call to
257 *         {@link #start()} returns, the paused MediaPlayer object goes back to
258 *         the <em>Started</em> state.</li>
259 *         <li>Calling {@link #pause()} has no effect on
260 *         a MediaPlayer object that is already in the <em>Paused</em> state.</li>
261 *         </ul>
262 *         </li>
263 *     <li>Calling  {@link #stop()} stops playback and causes a
264 *         MediaPlayer in the <em>Started</em>, <em>Paused</em>, <em>Prepared
265 *         </em> or <em>PlaybackCompleted</em> state to enter the
266 *         <em>Stopped</em> state.
267 *         <ul>
268 *         <li>Once in the <em>Stopped</em> state, playback cannot be started
269 *         until {@link #prepare()} or {@link #prepareAsync()} are called to set
270 *         the MediaPlayer object to the <em>Prepared</em> state again.</li>
271 *         <li>Calling {@link #stop()} has no effect on a MediaPlayer
272 *         object that is already in the <em>Stopped</em> state.</li>
273 *         </ul>
274 *         </li>
275 *     <li>The playback position can be adjusted with a call to
276 *         {@link #seekTo(int, int)}.
277 *         <ul>
278 *         <li>Although the asynchronuous {@link #seekTo(int, int)}
279 *         call returns right away, the actual seek operation may take a while to
280 *         finish, especially for audio/video being streamed. When the actual
281 *         seek operation completes, the internal player engine calls a user
282 *         supplied OnSeekComplete.onSeekComplete() if an OnSeekCompleteListener
283 *         has been registered beforehand via
284 *         {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}.</li>
285 *         <li>Please
286 *         note that {@link #seekTo(int, int)} can also be called in the other states,
287 *         such as <em>Prepared</em>, <em>Paused</em> and <em>PlaybackCompleted
288 *         </em> state. When {@link #seekTo(int, int)} is called in those states,
289 *         one video frame will be displayed if the stream has video and the requested
290 *         position is valid.
291 *         </li>
292 *         <li>Furthermore, the actual current playback position
293 *         can be retrieved with a call to {@link #getCurrentPosition()}, which
294 *         is helpful for applications such as a Music player that need to keep
295 *         track of the playback progress.</li>
296 *         </ul>
297 *         </li>
298 *     <li>When the playback reaches the end of stream, the playback completes.
299 *         <ul>
300 *         <li>If the looping mode was being set to <var>true</var>with
301 *         {@link #setLooping(boolean)}, the MediaPlayer object shall remain in
302 *         the <em>Started</em> state.</li>
303 *         <li>If the looping mode was set to <var>false
304 *         </var>, the player engine calls a user supplied callback method,
305 *         OnCompletion.onCompletion(), if a OnCompletionListener is registered
306 *         beforehand via {@link #setOnCompletionListener(OnCompletionListener)}.
307 *         The invoke of the callback signals that the object is now in the <em>
308 *         PlaybackCompleted</em> state.</li>
309 *         <li>While in the <em>PlaybackCompleted</em>
310 *         state, calling {@link #start()} can restart the playback from the
311 *         beginning of the audio/video source.</li>
312 * </ul>
313 *
314 *
315 * <a name="Valid_and_Invalid_States"></a>
316 * <h3>Valid and invalid states</h3>
317 *
318 * <table border="0" cellspacing="0" cellpadding="0">
319 * <tr><td>Method Name </p></td>
320 *     <td>Valid Sates </p></td>
321 *     <td>Invalid States </p></td>
322 *     <td>Comments </p></td></tr>
323 * <tr><td>attachAuxEffect </p></td>
324 *     <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
325 *     <td>{Idle, Error} </p></td>
326 *     <td>This method must be called after setDataSource.
327 *     Calling it does not change the object state. </p></td></tr>
328 * <tr><td>getAudioSessionId </p></td>
329 *     <td>any </p></td>
330 *     <td>{} </p></td>
331 *     <td>This method can be called in any state and calling it does not change
332 *         the object state. </p></td></tr>
333 * <tr><td>getCurrentPosition </p></td>
334 *     <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
335 *         PlaybackCompleted} </p></td>
336 *     <td>{Error}</p></td>
337 *     <td>Successful invoke of this method in a valid state does not change the
338 *         state. Calling this method in an invalid state transfers the object
339 *         to the <em>Error</em> state. </p></td></tr>
340 * <tr><td>getDuration </p></td>
341 *     <td>{Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
342 *     <td>{Idle, Initialized, Error} </p></td>
343 *     <td>Successful invoke of this method in a valid state does not change the
344 *         state. Calling this method in an invalid state transfers the object
345 *         to the <em>Error</em> state. </p></td></tr>
346 * <tr><td>getVideoHeight </p></td>
347 *     <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
348 *         PlaybackCompleted}</p></td>
349 *     <td>{Error}</p></td>
350 *     <td>Successful invoke of this method in a valid state does not change the
351 *         state. Calling this method in an invalid state transfers the object
352 *         to the <em>Error</em> state.  </p></td></tr>
353 * <tr><td>getVideoWidth </p></td>
354 *     <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
355 *         PlaybackCompleted}</p></td>
356 *     <td>{Error}</p></td>
357 *     <td>Successful invoke of this method in a valid state does not change
358 *         the state. Calling this method in an invalid state transfers the
359 *         object to the <em>Error</em> state. </p></td></tr>
360 * <tr><td>isPlaying </p></td>
361 *     <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
362 *          PlaybackCompleted}</p></td>
363 *     <td>{Error}</p></td>
364 *     <td>Successful invoke of this method in a valid state does not change
365 *         the state. Calling this method in an invalid state transfers the
366 *         object to the <em>Error</em> state. </p></td></tr>
367 * <tr><td>pause </p></td>
368 *     <td>{Started, Paused, PlaybackCompleted}</p></td>
369 *     <td>{Idle, Initialized, Prepared, Stopped, Error}</p></td>
370 *     <td>Successful invoke of this method in a valid state transfers the
371 *         object to the <em>Paused</em> state. Calling this method in an
372 *         invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
373 * <tr><td>prepare </p></td>
374 *     <td>{Initialized, Stopped} </p></td>
375 *     <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td>
376 *     <td>Successful invoke of this method in a valid state transfers the
377 *         object to the <em>Prepared</em> state. Calling this method in an
378 *         invalid state throws an IllegalStateException.</p></td></tr>
379 * <tr><td>prepareAsync </p></td>
380 *     <td>{Initialized, Stopped} </p></td>
381 *     <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td>
382 *     <td>Successful invoke of this method in a valid state transfers the
383 *         object to the <em>Preparing</em> state. Calling this method in an
384 *         invalid state throws an IllegalStateException.</p></td></tr>
385 * <tr><td>release </p></td>
386 *     <td>any </p></td>
387 *     <td>{} </p></td>
388 *     <td>After {@link #release()}, the object is no longer available. </p></td></tr>
389 * <tr><td>reset </p></td>
390 *     <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
391 *         PlaybackCompleted, Error}</p></td>
392 *     <td>{}</p></td>
393 *     <td>After {@link #reset()}, the object is like being just created.</p></td></tr>
394 * <tr><td>seekTo </p></td>
395 *     <td>{Prepared, Started, Paused, PlaybackCompleted} </p></td>
396 *     <td>{Idle, Initialized, Stopped, Error}</p></td>
397 *     <td>Successful invoke of this method in a valid state does not change
398 *         the state. Calling this method in an invalid state transfers the
399 *         object to the <em>Error</em> state. </p></td></tr>
400 * <tr><td>setAudioAttributes </p></td>
401 *     <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
402 *          PlaybackCompleted}</p></td>
403 *     <td>{Error}</p></td>
404 *     <td>Successful invoke of this method does not change the state. In order for the
405 *         target audio attributes type to become effective, this method must be called before
406 *         prepare() or prepareAsync().</p></td></tr>
407 * <tr><td>setAudioSessionId </p></td>
408 *     <td>{Idle} </p></td>
409 *     <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted,
410 *          Error} </p></td>
411 *     <td>This method must be called in idle state as the audio session ID must be known before
412 *         calling setDataSource. Calling it does not change the object state. </p></td></tr>
413 * <tr><td>setAudioStreamType (deprecated)</p></td>
414 *     <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
415 *          PlaybackCompleted}</p></td>
416 *     <td>{Error}</p></td>
417 *     <td>Successful invoke of this method does not change the state. In order for the
418 *         target audio stream type to become effective, this method must be called before
419 *         prepare() or prepareAsync().</p></td></tr>
420 * <tr><td>setAuxEffectSendLevel </p></td>
421 *     <td>any</p></td>
422 *     <td>{} </p></td>
423 *     <td>Calling this method does not change the object state. </p></td></tr>
424 * <tr><td>setDataSource </p></td>
425 *     <td>{Idle} </p></td>
426 *     <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted,
427 *          Error} </p></td>
428 *     <td>Successful invoke of this method in a valid state transfers the
429 *         object to the <em>Initialized</em> state. Calling this method in an
430 *         invalid state throws an IllegalStateException.</p></td></tr>
431 * <tr><td>setDisplay </p></td>
432 *     <td>any </p></td>
433 *     <td>{} </p></td>
434 *     <td>This method can be called in any state and calling it does not change
435 *         the object state. </p></td></tr>
436 * <tr><td>setSurface </p></td>
437 *     <td>any </p></td>
438 *     <td>{} </p></td>
439 *     <td>This method can be called in any state and calling it does not change
440 *         the object state. </p></td></tr>
441 * <tr><td>setVideoScalingMode </p></td>
442 *     <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
443 *     <td>{Idle, Error}</p></td>
444 *     <td>Successful invoke of this method does not change the state.</p></td></tr>
445 * <tr><td>setLooping </p></td>
446 *     <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
447 *         PlaybackCompleted}</p></td>
448 *     <td>{Error}</p></td>
449 *     <td>Successful invoke of this method in a valid state does not change
450 *         the state. Calling this method in an
451 *         invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
452 * <tr><td>isLooping </p></td>
453 *     <td>any </p></td>
454 *     <td>{} </p></td>
455 *     <td>This method can be called in any state and calling it does not change
456 *         the object state. </p></td></tr>
457 * <tr><td>setOnBufferingUpdateListener </p></td>
458 *     <td>any </p></td>
459 *     <td>{} </p></td>
460 *     <td>This method can be called in any state and calling it does not change
461 *         the object state. </p></td></tr>
462 * <tr><td>setOnCompletionListener </p></td>
463 *     <td>any </p></td>
464 *     <td>{} </p></td>
465 *     <td>This method can be called in any state and calling it does not change
466 *         the object state. </p></td></tr>
467 * <tr><td>setOnErrorListener </p></td>
468 *     <td>any </p></td>
469 *     <td>{} </p></td>
470 *     <td>This method can be called in any state and calling it does not change
471 *         the object state. </p></td></tr>
472 * <tr><td>setOnPreparedListener </p></td>
473 *     <td>any </p></td>
474 *     <td>{} </p></td>
475 *     <td>This method can be called in any state and calling it does not change
476 *         the object state. </p></td></tr>
477 * <tr><td>setOnSeekCompleteListener </p></td>
478 *     <td>any </p></td>
479 *     <td>{} </p></td>
480 *     <td>This method can be called in any state and calling it does not change
481 *         the object state. </p></td></tr>
482 * <tr><td>setPlaybackParams</p></td>
483 *     <td>{Initialized, Prepared, Started, Paused, PlaybackCompleted, Error}</p></td>
484 *     <td>{Idle, Stopped} </p></td>
485 *     <td>This method will change state in some cases, depending on when it's called.
486 *         </p></td></tr>
487 * <tr><td>setScreenOnWhilePlaying</></td>
488 *     <td>any </p></td>
489 *     <td>{} </p></td>
490 *     <td>This method can be called in any state and calling it does not change
491 *         the object state.  </p></td></tr>
492 * <tr><td>setVolume </p></td>
493 *     <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
494 *          PlaybackCompleted}</p></td>
495 *     <td>{Error}</p></td>
496 *     <td>Successful invoke of this method does not change the state.
497 * <tr><td>setWakeMode </p></td>
498 *     <td>any </p></td>
499 *     <td>{} </p></td>
500 *     <td>This method can be called in any state and calling it does not change
501 *         the object state.</p></td></tr>
502 * <tr><td>start </p></td>
503 *     <td>{Prepared, Started, Paused, PlaybackCompleted}</p></td>
504 *     <td>{Idle, Initialized, Stopped, Error}</p></td>
505 *     <td>Successful invoke of this method in a valid state transfers the
506 *         object to the <em>Started</em> state. Calling this method in an
507 *         invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
508 * <tr><td>stop </p></td>
509 *     <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
510 *     <td>{Idle, Initialized, Error}</p></td>
511 *     <td>Successful invoke of this method in a valid state transfers the
512 *         object to the <em>Stopped</em> state. Calling this method in an
513 *         invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
514 * <tr><td>getTrackInfo </p></td>
515 *     <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
516 *     <td>{Idle, Initialized, Error}</p></td>
517 *     <td>Successful invoke of this method does not change the state.</p></td></tr>
518 * <tr><td>addTimedTextSource </p></td>
519 *     <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
520 *     <td>{Idle, Initialized, Error}</p></td>
521 *     <td>Successful invoke of this method does not change the state.</p></td></tr>
522 * <tr><td>selectTrack </p></td>
523 *     <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
524 *     <td>{Idle, Initialized, Error}</p></td>
525 *     <td>Successful invoke of this method does not change the state.</p></td></tr>
526 * <tr><td>deselectTrack </p></td>
527 *     <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
528 *     <td>{Idle, Initialized, Error}</p></td>
529 *     <td>Successful invoke of this method does not change the state.</p></td></tr>
530 *
531 * </table>
532 *
533 * <a name="Permissions"></a>
534 * <h3>Permissions</h3>
535 * <p>One may need to declare a corresponding WAKE_LOCK permission {@link
536 * android.R.styleable#AndroidManifestUsesPermission &lt;uses-permission&gt;}
537 * element.
538 *
539 * <p>This class requires the {@link android.Manifest.permission#INTERNET} permission
540 * when used with network-based content.
541 *
542 * <a name="Callbacks"></a>
543 * <h3>Callbacks</h3>
544 * <p>Applications may want to register for informational and error
545 * events in order to be informed of some internal state update and
546 * possible runtime errors during playback or streaming. Registration for
547 * these events is done by properly setting the appropriate listeners (via calls
548 * to
549 * {@link #setOnPreparedListener(OnPreparedListener)}setOnPreparedListener,
550 * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}setOnVideoSizeChangedListener,
551 * {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}setOnSeekCompleteListener,
552 * {@link #setOnCompletionListener(OnCompletionListener)}setOnCompletionListener,
553 * {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}setOnBufferingUpdateListener,
554 * {@link #setOnInfoListener(OnInfoListener)}setOnInfoListener,
555 * {@link #setOnErrorListener(OnErrorListener)}setOnErrorListener, etc).
556 * In order to receive the respective callback
557 * associated with these listeners, applications are required to create
558 * MediaPlayer objects on a thread with its own Looper running (main UI
559 * thread by default has a Looper running).
560 *
561 */
562public class MediaPlayer extends PlayerBase
563                         implements SubtitleController.Listener
564{
565    /**
566       Constant to retrieve only the new metadata since the last
567       call.
568       // FIXME: unhide.
569       // FIXME: add link to getMetadata(boolean, boolean)
570       {@hide}
571     */
572    public static final boolean METADATA_UPDATE_ONLY = true;
573
574    /**
575       Constant to retrieve all the metadata.
576       // FIXME: unhide.
577       // FIXME: add link to getMetadata(boolean, boolean)
578       {@hide}
579     */
580    public static final boolean METADATA_ALL = false;
581
582    /**
583       Constant to enable the metadata filter during retrieval.
584       // FIXME: unhide.
585       // FIXME: add link to getMetadata(boolean, boolean)
586       {@hide}
587     */
588    public static final boolean APPLY_METADATA_FILTER = true;
589
590    /**
591       Constant to disable the metadata filter during retrieval.
592       // FIXME: unhide.
593       // FIXME: add link to getMetadata(boolean, boolean)
594       {@hide}
595     */
596    public static final boolean BYPASS_METADATA_FILTER = false;
597
598    static {
599        System.loadLibrary("media_jni");
600        native_init();
601    }
602
603    private final static String TAG = "MediaPlayer";
604    // Name of the remote interface for the media player. Must be kept
605    // in sync with the 2nd parameter of the IMPLEMENT_META_INTERFACE
606    // macro invocation in IMediaPlayer.cpp
607    private final static String IMEDIA_PLAYER = "android.media.IMediaPlayer";
608
609    private long mNativeContext; // accessed by native methods
610    private long mNativeSurfaceTexture;  // accessed by native methods
611    private int mListenerContext; // accessed by native methods
612    private SurfaceHolder mSurfaceHolder;
613    private EventHandler mEventHandler;
614    private PowerManager.WakeLock mWakeLock = null;
615    private boolean mScreenOnWhilePlaying;
616    private boolean mStayAwake;
617    private int mStreamType = AudioManager.USE_DEFAULT_STREAM_TYPE;
618    private int mUsage = -1;
619    private boolean mBypassInterruptionPolicy;
620
621    /**
622     * Default constructor. Consider using one of the create() methods for
623     * synchronously instantiating a MediaPlayer from a Uri or resource.
624     * <p>When done with the MediaPlayer, you should call  {@link #release()},
625     * to free the resources. If not released, too many MediaPlayer instances may
626     * result in an exception.</p>
627     */
628    public MediaPlayer() {
629        super(new AudioAttributes.Builder().build(),
630                AudioPlaybackConfiguration.PLAYER_TYPE_JAM_MEDIAPLAYER);
631
632        Looper looper;
633        if ((looper = Looper.myLooper()) != null) {
634            mEventHandler = new EventHandler(this, looper);
635        } else if ((looper = Looper.getMainLooper()) != null) {
636            mEventHandler = new EventHandler(this, looper);
637        } else {
638            mEventHandler = null;
639        }
640
641        mTimeProvider = new TimeProvider(this);
642        mOpenSubtitleSources = new Vector<InputStream>();
643
644        /* Native setup requires a weak reference to our object.
645         * It's easier to create it here than in C++.
646         */
647        native_setup(new WeakReference<MediaPlayer>(this));
648    }
649
650    /*
651     * Update the MediaPlayer SurfaceTexture.
652     * Call after setting a new display surface.
653     */
654    private native void _setVideoSurface(Surface surface);
655
656    /* Do not change these values (starting with INVOKE_ID) without updating
657     * their counterparts in include/media/mediaplayer.h!
658     */
659    private static final int INVOKE_ID_GET_TRACK_INFO = 1;
660    private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE = 2;
661    private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE_FD = 3;
662    private static final int INVOKE_ID_SELECT_TRACK = 4;
663    private static final int INVOKE_ID_DESELECT_TRACK = 5;
664    private static final int INVOKE_ID_SET_VIDEO_SCALE_MODE = 6;
665    private static final int INVOKE_ID_GET_SELECTED_TRACK = 7;
666
667    /**
668     * Create a request parcel which can be routed to the native media
669     * player using {@link #invoke(Parcel, Parcel)}. The Parcel
670     * returned has the proper InterfaceToken set. The caller should
671     * not overwrite that token, i.e it can only append data to the
672     * Parcel.
673     *
674     * @return A parcel suitable to hold a request for the native
675     * player.
676     * {@hide}
677     */
678    public Parcel newRequest() {
679        Parcel parcel = Parcel.obtain();
680        parcel.writeInterfaceToken(IMEDIA_PLAYER);
681        return parcel;
682    }
683
684    /**
685     * Invoke a generic method on the native player using opaque
686     * parcels for the request and reply. Both payloads' format is a
687     * convention between the java caller and the native player.
688     * Must be called after setDataSource to make sure a native player
689     * exists. On failure, a RuntimeException is thrown.
690     *
691     * @param request Parcel with the data for the extension. The
692     * caller must use {@link #newRequest()} to get one.
693     *
694     * @param reply Output parcel with the data returned by the
695     * native player.
696     * {@hide}
697     */
698    public void invoke(Parcel request, Parcel reply) {
699        int retcode = native_invoke(request, reply);
700        reply.setDataPosition(0);
701        if (retcode != 0) {
702            throw new RuntimeException("failure code: " + retcode);
703        }
704    }
705
706    /**
707     * Sets the {@link SurfaceHolder} to use for displaying the video
708     * portion of the media.
709     *
710     * Either a surface holder or surface must be set if a display or video sink
711     * is needed.  Not calling this method or {@link #setSurface(Surface)}
712     * when playing back a video will result in only the audio track being played.
713     * A null surface holder or surface will result in only the audio track being
714     * played.
715     *
716     * @param sh the SurfaceHolder to use for video display
717     * @throws IllegalStateException if the internal player engine has not been
718     * initialized or has been released.
719     */
720    public void setDisplay(SurfaceHolder sh) {
721        mSurfaceHolder = sh;
722        Surface surface;
723        if (sh != null) {
724            surface = sh.getSurface();
725        } else {
726            surface = null;
727        }
728        _setVideoSurface(surface);
729        updateSurfaceScreenOn();
730    }
731
732    /**
733     * Sets the {@link Surface} to be used as the sink for the video portion of
734     * the media. This is similar to {@link #setDisplay(SurfaceHolder)}, but
735     * does not support {@link #setScreenOnWhilePlaying(boolean)}.  Setting a
736     * Surface will un-set any Surface or SurfaceHolder that was previously set.
737     * A null surface will result in only the audio track being played.
738     *
739     * If the Surface sends frames to a {@link SurfaceTexture}, the timestamps
740     * returned from {@link SurfaceTexture#getTimestamp()} will have an
741     * unspecified zero point.  These timestamps cannot be directly compared
742     * between different media sources, different instances of the same media
743     * source, or multiple runs of the same program.  The timestamp is normally
744     * monotonically increasing and is unaffected by time-of-day adjustments,
745     * but it is reset when the position is set.
746     *
747     * @param surface The {@link Surface} to be used for the video portion of
748     * the media.
749     * @throws IllegalStateException if the internal player engine has not been
750     * initialized or has been released.
751     */
752    public void setSurface(Surface surface) {
753        if (mScreenOnWhilePlaying && surface != null) {
754            Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective for Surface");
755        }
756        mSurfaceHolder = null;
757        _setVideoSurface(surface);
758        updateSurfaceScreenOn();
759    }
760
761    /* Do not change these video scaling mode values below without updating
762     * their counterparts in system/window.h! Please do not forget to update
763     * {@link #isVideoScalingModeSupported} when new video scaling modes
764     * are added.
765     */
766    /**
767     * Specifies a video scaling mode. The content is stretched to the
768     * surface rendering area. When the surface has the same aspect ratio
769     * as the content, the aspect ratio of the content is maintained;
770     * otherwise, the aspect ratio of the content is not maintained when video
771     * is being rendered. Unlike {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING},
772     * there is no content cropping with this video scaling mode.
773     */
774    public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT = 1;
775
776    /**
777     * Specifies a video scaling mode. The content is scaled, maintaining
778     * its aspect ratio. The whole surface area is always used. When the
779     * aspect ratio of the content is the same as the surface, no content
780     * is cropped; otherwise, content is cropped to fit the surface.
781     */
782    public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
783    /**
784     * Sets video scaling mode. To make the target video scaling mode
785     * effective during playback, this method must be called after
786     * data source is set. If not called, the default video
787     * scaling mode is {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT}.
788     *
789     * <p> The supported video scaling modes are:
790     * <ul>
791     * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT}
792     * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}
793     * </ul>
794     *
795     * @param mode target video scaling mode. Must be one of the supported
796     * video scaling modes; otherwise, IllegalArgumentException will be thrown.
797     *
798     * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT
799     * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING
800     */
801    public void setVideoScalingMode(int mode) {
802        if (!isVideoScalingModeSupported(mode)) {
803            final String msg = "Scaling mode " + mode + " is not supported";
804            throw new IllegalArgumentException(msg);
805        }
806        Parcel request = Parcel.obtain();
807        Parcel reply = Parcel.obtain();
808        try {
809            request.writeInterfaceToken(IMEDIA_PLAYER);
810            request.writeInt(INVOKE_ID_SET_VIDEO_SCALE_MODE);
811            request.writeInt(mode);
812            invoke(request, reply);
813        } finally {
814            request.recycle();
815            reply.recycle();
816        }
817    }
818
819    /**
820     * Convenience method to create a MediaPlayer for a given Uri.
821     * On success, {@link #prepare()} will already have been called and must not be called again.
822     * <p>When done with the MediaPlayer, you should call  {@link #release()},
823     * to free the resources. If not released, too many MediaPlayer instances will
824     * result in an exception.</p>
825     * <p>Note that since {@link #prepare()} is called automatically in this method,
826     * you cannot change the audio
827     * session ID (see {@link #setAudioSessionId(int)}) or audio attributes
828     * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p>
829     *
830     * @param context the Context to use
831     * @param uri the Uri from which to get the datasource
832     * @return a MediaPlayer object, or null if creation failed
833     */
834    public static MediaPlayer create(Context context, Uri uri) {
835        return create (context, uri, null);
836    }
837
838    /**
839     * Convenience method to create a MediaPlayer for a given Uri.
840     * On success, {@link #prepare()} will already have been called and must not be called again.
841     * <p>When done with the MediaPlayer, you should call  {@link #release()},
842     * to free the resources. If not released, too many MediaPlayer instances will
843     * result in an exception.</p>
844     * <p>Note that since {@link #prepare()} is called automatically in this method,
845     * you cannot change the audio
846     * session ID (see {@link #setAudioSessionId(int)}) or audio attributes
847     * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p>
848     *
849     * @param context the Context to use
850     * @param uri the Uri from which to get the datasource
851     * @param holder the SurfaceHolder to use for displaying the video
852     * @return a MediaPlayer object, or null if creation failed
853     */
854    public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder) {
855        int s = AudioSystem.newAudioSessionId();
856        return create(context, uri, holder, null, s > 0 ? s : 0);
857    }
858
859    /**
860     * Same factory method as {@link #create(Context, Uri, SurfaceHolder)} but that lets you specify
861     * the audio attributes and session ID to be used by the new MediaPlayer instance.
862     * @param context the Context to use
863     * @param uri the Uri from which to get the datasource
864     * @param holder the SurfaceHolder to use for displaying the video, may be null.
865     * @param audioAttributes the {@link AudioAttributes} to be used by the media player.
866     * @param audioSessionId the audio session ID to be used by the media player,
867     *     see {@link AudioManager#generateAudioSessionId()} to obtain a new session.
868     * @return a MediaPlayer object, or null if creation failed
869     */
870    public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder,
871            AudioAttributes audioAttributes, int audioSessionId) {
872
873        try {
874            MediaPlayer mp = new MediaPlayer();
875            final AudioAttributes aa = audioAttributes != null ? audioAttributes :
876                new AudioAttributes.Builder().build();
877            mp.setAudioAttributes(aa);
878            mp.setAudioSessionId(audioSessionId);
879            mp.setDataSource(context, uri);
880            if (holder != null) {
881                mp.setDisplay(holder);
882            }
883            mp.prepare();
884            return mp;
885        } catch (IOException ex) {
886            Log.d(TAG, "create failed:", ex);
887            // fall through
888        } catch (IllegalArgumentException ex) {
889            Log.d(TAG, "create failed:", ex);
890            // fall through
891        } catch (SecurityException ex) {
892            Log.d(TAG, "create failed:", ex);
893            // fall through
894        }
895
896        return null;
897    }
898
899    // Note no convenience method to create a MediaPlayer with SurfaceTexture sink.
900
901    /**
902     * Convenience method to create a MediaPlayer for a given resource id.
903     * On success, {@link #prepare()} will already have been called and must not be called again.
904     * <p>When done with the MediaPlayer, you should call  {@link #release()},
905     * to free the resources. If not released, too many MediaPlayer instances will
906     * result in an exception.</p>
907     * <p>Note that since {@link #prepare()} is called automatically in this method,
908     * you cannot change the audio
909     * session ID (see {@link #setAudioSessionId(int)}) or audio attributes
910     * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p>
911     *
912     * @param context the Context to use
913     * @param resid the raw resource id (<var>R.raw.&lt;something></var>) for
914     *              the resource to use as the datasource
915     * @return a MediaPlayer object, or null if creation failed
916     */
917    public static MediaPlayer create(Context context, int resid) {
918        int s = AudioSystem.newAudioSessionId();
919        return create(context, resid, null, s > 0 ? s : 0);
920    }
921
922    /**
923     * Same factory method as {@link #create(Context, int)} but that lets you specify the audio
924     * attributes and session ID to be used by the new MediaPlayer instance.
925     * @param context the Context to use
926     * @param resid the raw resource id (<var>R.raw.&lt;something></var>) for
927     *              the resource to use as the datasource
928     * @param audioAttributes the {@link AudioAttributes} to be used by the media player.
929     * @param audioSessionId the audio session ID to be used by the media player,
930     *     see {@link AudioManager#generateAudioSessionId()} to obtain a new session.
931     * @return a MediaPlayer object, or null if creation failed
932     */
933    public static MediaPlayer create(Context context, int resid,
934            AudioAttributes audioAttributes, int audioSessionId) {
935        try {
936            AssetFileDescriptor afd = context.getResources().openRawResourceFd(resid);
937            if (afd == null) return null;
938
939            MediaPlayer mp = new MediaPlayer();
940
941            final AudioAttributes aa = audioAttributes != null ? audioAttributes :
942                new AudioAttributes.Builder().build();
943            mp.setAudioAttributes(aa);
944            mp.setAudioSessionId(audioSessionId);
945
946            mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
947            afd.close();
948            mp.prepare();
949            return mp;
950        } catch (IOException ex) {
951            Log.d(TAG, "create failed:", ex);
952            // fall through
953        } catch (IllegalArgumentException ex) {
954            Log.d(TAG, "create failed:", ex);
955           // fall through
956        } catch (SecurityException ex) {
957            Log.d(TAG, "create failed:", ex);
958            // fall through
959        }
960        return null;
961    }
962
963    /**
964     * Sets the data source as a content Uri.
965     *
966     * @param context the Context to use when resolving the Uri
967     * @param uri the Content URI of the data you want to play
968     * @throws IllegalStateException if it is called in an invalid state
969     */
970    public void setDataSource(@NonNull Context context, @NonNull Uri uri)
971            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
972        setDataSource(context, uri, null);
973    }
974
975    /**
976     * Sets the data source as a content Uri.
977     *
978     * @param context the Context to use when resolving the Uri
979     * @param uri the Content URI of the data you want to play
980     * @param headers the headers to be sent together with the request for the data
981     *                Note that the cross domain redirection is allowed by default, but that can be
982     *                changed with key/value pairs through the headers parameter with
983     *                "android-allow-cross-domain-redirect" as the key and "0" or "1" as the value
984     *                to disallow or allow cross domain redirection.
985     * @throws IllegalStateException if it is called in an invalid state
986     */
987    public void setDataSource(@NonNull Context context, @NonNull Uri uri,
988            @Nullable Map<String, String> headers) throws IOException, IllegalArgumentException,
989                    SecurityException, IllegalStateException {
990        // The context and URI usually belong to the calling user. Get a resolver for that user
991        // and strip out the userId from the URI if present.
992        final ContentResolver resolver = context.getContentResolver();
993        final String scheme = uri.getScheme();
994        final String authority = ContentProvider.getAuthorityWithoutUserId(uri.getAuthority());
995        if (ContentResolver.SCHEME_FILE.equals(scheme)) {
996            setDataSource(uri.getPath());
997            return;
998        } else if (ContentResolver.SCHEME_CONTENT.equals(scheme)
999                && Settings.AUTHORITY.equals(authority)) {
1000            // Try cached ringtone first since the actual provider may not be
1001            // encryption aware, or it may be stored on CE media storage
1002            final int type = RingtoneManager.getDefaultType(uri);
1003            final Uri cacheUri = RingtoneManager.getCacheForType(type, context.getUserId());
1004            final Uri actualUri = RingtoneManager.getActualDefaultRingtoneUri(context, type);
1005            if (attemptDataSource(resolver, cacheUri)) {
1006                return;
1007            } else if (attemptDataSource(resolver, actualUri)) {
1008                return;
1009            } else {
1010                setDataSource(uri.toString(), headers);
1011            }
1012        } else {
1013            // Try requested Uri locally first, or fallback to media server
1014            if (attemptDataSource(resolver, uri)) {
1015                return;
1016            } else {
1017                setDataSource(uri.toString(), headers);
1018            }
1019        }
1020    }
1021
1022    private boolean attemptDataSource(ContentResolver resolver, Uri uri) {
1023        try (AssetFileDescriptor afd = resolver.openAssetFileDescriptor(uri, "r")) {
1024            setDataSource(afd);
1025            return true;
1026        } catch (NullPointerException | SecurityException | IOException ex) {
1027            Log.w(TAG, "Couldn't open " + uri + ": " + ex);
1028            return false;
1029        }
1030    }
1031
1032    /**
1033     * Sets the data source (file-path or http/rtsp URL) to use.
1034     *
1035     * @param path the path of the file, or the http/rtsp URL of the stream you want to play
1036     * @throws IllegalStateException if it is called in an invalid state
1037     *
1038     * <p>When <code>path</code> refers to a local file, the file may actually be opened by a
1039     * process other than the calling application.  This implies that the pathname
1040     * should be an absolute path (as any other process runs with unspecified current working
1041     * directory), and that the pathname should reference a world-readable file.
1042     * As an alternative, the application could first open the file for reading,
1043     * and then use the file descriptor form {@link #setDataSource(FileDescriptor)}.
1044     */
1045    public void setDataSource(String path)
1046            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
1047        setDataSource(path, null, null);
1048    }
1049
1050    /**
1051     * Sets the data source (file-path or http/rtsp URL) to use.
1052     *
1053     * @param path the path of the file, or the http/rtsp URL of the stream you want to play
1054     * @param headers the headers associated with the http request for the stream you want to play
1055     * @throws IllegalStateException if it is called in an invalid state
1056     * @hide pending API council
1057     */
1058    public void setDataSource(String path, Map<String, String> headers)
1059            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException
1060    {
1061        String[] keys = null;
1062        String[] values = null;
1063
1064        if (headers != null) {
1065            keys = new String[headers.size()];
1066            values = new String[headers.size()];
1067
1068            int i = 0;
1069            for (Map.Entry<String, String> entry: headers.entrySet()) {
1070                keys[i] = entry.getKey();
1071                values[i] = entry.getValue();
1072                ++i;
1073            }
1074        }
1075        setDataSource(path, keys, values);
1076    }
1077
1078    private void setDataSource(String path, String[] keys, String[] values)
1079            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
1080        final Uri uri = Uri.parse(path);
1081        final String scheme = uri.getScheme();
1082        if ("file".equals(scheme)) {
1083            path = uri.getPath();
1084        } else if (scheme != null) {
1085            // handle non-file sources
1086            nativeSetDataSource(
1087                MediaHTTPService.createHttpServiceBinderIfNecessary(path),
1088                path,
1089                keys,
1090                values);
1091            return;
1092        }
1093
1094        final File file = new File(path);
1095        if (file.exists()) {
1096            FileInputStream is = new FileInputStream(file);
1097            FileDescriptor fd = is.getFD();
1098            setDataSource(fd);
1099            is.close();
1100        } else {
1101            throw new IOException("setDataSource failed.");
1102        }
1103    }
1104
1105    private native void nativeSetDataSource(
1106        IBinder httpServiceBinder, String path, String[] keys, String[] values)
1107        throws IOException, IllegalArgumentException, SecurityException, IllegalStateException;
1108
1109    /**
1110     * Sets the data source (AssetFileDescriptor) to use. It is the caller's
1111     * responsibility to close the file descriptor. It is safe to do so as soon
1112     * as this call returns.
1113     *
1114     * @param afd the AssetFileDescriptor for the file you want to play
1115     * @throws IllegalStateException if it is called in an invalid state
1116     * @throws IllegalArgumentException if afd is not a valid AssetFileDescriptor
1117     * @throws IOException if afd can not be read
1118     */
1119    public void setDataSource(@NonNull AssetFileDescriptor afd)
1120            throws IOException, IllegalArgumentException, IllegalStateException {
1121        Preconditions.checkNotNull(afd);
1122        // Note: using getDeclaredLength so that our behavior is the same
1123        // as previous versions when the content provider is returning
1124        // a full file.
1125        if (afd.getDeclaredLength() < 0) {
1126            setDataSource(afd.getFileDescriptor());
1127        } else {
1128            setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getDeclaredLength());
1129        }
1130    }
1131
1132    /**
1133     * Sets the data source (FileDescriptor) to use. It is the caller's responsibility
1134     * to close the file descriptor. It is safe to do so as soon as this call returns.
1135     *
1136     * @param fd the FileDescriptor for the file you want to play
1137     * @throws IllegalStateException if it is called in an invalid state
1138     * @throws IllegalArgumentException if fd is not a valid FileDescriptor
1139     * @throws IOException if fd can not be read
1140     */
1141    public void setDataSource(FileDescriptor fd)
1142            throws IOException, IllegalArgumentException, IllegalStateException {
1143        // intentionally less than LONG_MAX
1144        setDataSource(fd, 0, 0x7ffffffffffffffL);
1145    }
1146
1147    /**
1148     * Sets the data source (FileDescriptor) to use.  The FileDescriptor must be
1149     * seekable (N.B. a LocalSocket is not seekable). It is the caller's responsibility
1150     * to close the file descriptor. It is safe to do so as soon as this call returns.
1151     *
1152     * @param fd the FileDescriptor for the file you want to play
1153     * @param offset the offset into the file where the data to be played starts, in bytes
1154     * @param length the length in bytes of the data to be played
1155     * @throws IllegalStateException if it is called in an invalid state
1156     * @throws IllegalArgumentException if fd is not a valid FileDescriptor
1157     * @throws IOException if fd can not be read
1158     */
1159    public void setDataSource(FileDescriptor fd, long offset, long length)
1160            throws IOException, IllegalArgumentException, IllegalStateException {
1161        _setDataSource(fd, offset, length);
1162    }
1163
1164    private native void _setDataSource(FileDescriptor fd, long offset, long length)
1165            throws IOException, IllegalArgumentException, IllegalStateException;
1166
1167    /**
1168     * Sets the data source (MediaDataSource) to use.
1169     *
1170     * @param dataSource the MediaDataSource for the media you want to play
1171     * @throws IllegalStateException if it is called in an invalid state
1172     * @throws IllegalArgumentException if dataSource is not a valid MediaDataSource
1173     */
1174    public void setDataSource(MediaDataSource dataSource)
1175            throws IllegalArgumentException, IllegalStateException {
1176        _setDataSource(dataSource);
1177    }
1178
1179    private native void _setDataSource(MediaDataSource dataSource)
1180          throws IllegalArgumentException, IllegalStateException;
1181
1182    /**
1183     * Prepares the player for playback, synchronously.
1184     *
1185     * After setting the datasource and the display surface, you need to either
1186     * call prepare() or prepareAsync(). For files, it is OK to call prepare(),
1187     * which blocks until MediaPlayer is ready for playback.
1188     *
1189     * @throws IllegalStateException if it is called in an invalid state
1190     */
1191    public void prepare() throws IOException, IllegalStateException {
1192        _prepare();
1193        scanInternalSubtitleTracks();
1194    }
1195
1196    private native void _prepare() throws IOException, IllegalStateException;
1197
1198    /**
1199     * Prepares the player for playback, asynchronously.
1200     *
1201     * After setting the datasource and the display surface, you need to either
1202     * call prepare() or prepareAsync(). For streams, you should call prepareAsync(),
1203     * which returns immediately, rather than blocking until enough data has been
1204     * buffered.
1205     *
1206     * @throws IllegalStateException if it is called in an invalid state
1207     */
1208    public native void prepareAsync() throws IllegalStateException;
1209
1210    /**
1211     * Starts or resumes playback. If playback had previously been paused,
1212     * playback will continue from where it was paused. If playback had
1213     * been stopped, or never started before, playback will start at the
1214     * beginning.
1215     *
1216     * @throws IllegalStateException if it is called in an invalid state
1217     */
1218    public void start() throws IllegalStateException {
1219        baseStart();
1220        stayAwake(true);
1221        _start();
1222    }
1223
1224    private native void _start() throws IllegalStateException;
1225
1226
1227    private int getAudioStreamType() {
1228        if (mStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
1229            mStreamType = _getAudioStreamType();
1230        }
1231        return mStreamType;
1232    }
1233
1234    private native int _getAudioStreamType() throws IllegalStateException;
1235
1236    /**
1237     * Stops playback after playback has been stopped or paused.
1238     *
1239     * @throws IllegalStateException if the internal player engine has not been
1240     * initialized.
1241     */
1242    public void stop() throws IllegalStateException {
1243        stayAwake(false);
1244        _stop();
1245        baseStop();
1246    }
1247
1248    private native void _stop() throws IllegalStateException;
1249
1250    /**
1251     * Pauses playback. Call start() to resume.
1252     *
1253     * @throws IllegalStateException if the internal player engine has not been
1254     * initialized.
1255     */
1256    public void pause() throws IllegalStateException {
1257        stayAwake(false);
1258        _pause();
1259        basePause();
1260    }
1261
1262    private native void _pause() throws IllegalStateException;
1263
1264    /**
1265     * Set the low-level power management behavior for this MediaPlayer.  This
1266     * can be used when the MediaPlayer is not playing through a SurfaceHolder
1267     * set with {@link #setDisplay(SurfaceHolder)} and thus can use the
1268     * high-level {@link #setScreenOnWhilePlaying(boolean)} feature.
1269     *
1270     * <p>This function has the MediaPlayer access the low-level power manager
1271     * service to control the device's power usage while playing is occurring.
1272     * The parameter is a combination of {@link android.os.PowerManager} wake flags.
1273     * Use of this method requires {@link android.Manifest.permission#WAKE_LOCK}
1274     * permission.
1275     * By default, no attempt is made to keep the device awake during playback.
1276     *
1277     * @param context the Context to use
1278     * @param mode    the power/wake mode to set
1279     * @see android.os.PowerManager
1280     */
1281    public void setWakeMode(Context context, int mode) {
1282        boolean washeld = false;
1283
1284        /* Disable persistant wakelocks in media player based on property */
1285        if (SystemProperties.getBoolean("audio.offload.ignore_setawake", false) == true) {
1286            Log.w(TAG, "IGNORING setWakeMode " + mode);
1287            return;
1288        }
1289
1290        if (mWakeLock != null) {
1291            if (mWakeLock.isHeld()) {
1292                washeld = true;
1293                mWakeLock.release();
1294            }
1295            mWakeLock = null;
1296        }
1297
1298        PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE);
1299        mWakeLock = pm.newWakeLock(mode|PowerManager.ON_AFTER_RELEASE, MediaPlayer.class.getName());
1300        mWakeLock.setReferenceCounted(false);
1301        if (washeld) {
1302            mWakeLock.acquire();
1303        }
1304    }
1305
1306    /**
1307     * Control whether we should use the attached SurfaceHolder to keep the
1308     * screen on while video playback is occurring.  This is the preferred
1309     * method over {@link #setWakeMode} where possible, since it doesn't
1310     * require that the application have permission for low-level wake lock
1311     * access.
1312     *
1313     * @param screenOn Supply true to keep the screen on, false to allow it
1314     * to turn off.
1315     */
1316    public void setScreenOnWhilePlaying(boolean screenOn) {
1317        if (mScreenOnWhilePlaying != screenOn) {
1318            if (screenOn && mSurfaceHolder == null) {
1319                Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective without a SurfaceHolder");
1320            }
1321            mScreenOnWhilePlaying = screenOn;
1322            updateSurfaceScreenOn();
1323        }
1324    }
1325
1326    private void stayAwake(boolean awake) {
1327        if (mWakeLock != null) {
1328            if (awake && !mWakeLock.isHeld()) {
1329                mWakeLock.acquire();
1330            } else if (!awake && mWakeLock.isHeld()) {
1331                mWakeLock.release();
1332            }
1333        }
1334        mStayAwake = awake;
1335        updateSurfaceScreenOn();
1336    }
1337
1338    private void updateSurfaceScreenOn() {
1339        if (mSurfaceHolder != null) {
1340            mSurfaceHolder.setKeepScreenOn(mScreenOnWhilePlaying && mStayAwake);
1341        }
1342    }
1343
1344    /**
1345     * Returns the width of the video.
1346     *
1347     * @return the width of the video, or 0 if there is no video,
1348     * no display surface was set, or the width has not been determined
1349     * yet. The OnVideoSizeChangedListener can be registered via
1350     * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}
1351     * to provide a notification when the width is available.
1352     */
1353    public native int getVideoWidth();
1354
1355    /**
1356     * Returns the height of the video.
1357     *
1358     * @return the height of the video, or 0 if there is no video,
1359     * no display surface was set, or the height has not been determined
1360     * yet. The OnVideoSizeChangedListener can be registered via
1361     * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}
1362     * to provide a notification when the height is available.
1363     */
1364    public native int getVideoHeight();
1365
1366    /**
1367     * Checks whether the MediaPlayer is playing.
1368     *
1369     * @return true if currently playing, false otherwise
1370     * @throws IllegalStateException if the internal player engine has not been
1371     * initialized or has been released.
1372     */
1373    public native boolean isPlaying();
1374
1375    /**
1376     * Change playback speed of audio by resampling the audio.
1377     * <p>
1378     * Specifies resampling as audio mode for variable rate playback, i.e.,
1379     * resample the waveform based on the requested playback rate to get
1380     * a new waveform, and play back the new waveform at the original sampling
1381     * frequency.
1382     * When rate is larger than 1.0, pitch becomes higher.
1383     * When rate is smaller than 1.0, pitch becomes lower.
1384     *
1385     * @hide
1386     */
1387    public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;
1388
1389    /**
1390     * Change playback speed of audio without changing its pitch.
1391     * <p>
1392     * Specifies time stretching as audio mode for variable rate playback.
1393     * Time stretching changes the duration of the audio samples without
1394     * affecting its pitch.
1395     * <p>
1396     * This mode is only supported for a limited range of playback speed factors,
1397     * e.g. between 1/2x and 2x.
1398     *
1399     * @hide
1400     */
1401    public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;
1402
1403    /**
1404     * Change playback speed of audio without changing its pitch, and
1405     * possibly mute audio if time stretching is not supported for the playback
1406     * speed.
1407     * <p>
1408     * Try to keep audio pitch when changing the playback rate, but allow the
1409     * system to determine how to change audio playback if the rate is out
1410     * of range.
1411     *
1412     * @hide
1413     */
1414    public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;
1415
1416    /** @hide */
1417    @IntDef(
1418        value = {
1419            PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
1420            PLAYBACK_RATE_AUDIO_MODE_STRETCH,
1421            PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
1422    })
1423    @Retention(RetentionPolicy.SOURCE)
1424    public @interface PlaybackRateAudioMode {}
1425
1426    /**
1427     * Sets playback rate and audio mode.
1428     *
1429     * @param rate the ratio between desired playback rate and normal one.
1430     * @param audioMode audio playback mode. Must be one of the supported
1431     * audio modes.
1432     *
1433     * @throws IllegalStateException if the internal player engine has not been
1434     * initialized.
1435     * @throws IllegalArgumentException if audioMode is not supported.
1436     *
1437     * @hide
1438     */
1439    @NonNull
1440    public PlaybackParams easyPlaybackParams(float rate, @PlaybackRateAudioMode int audioMode) {
1441        PlaybackParams params = new PlaybackParams();
1442        params.allowDefaults();
1443        switch (audioMode) {
1444        case PLAYBACK_RATE_AUDIO_MODE_DEFAULT:
1445            params.setSpeed(rate).setPitch(1.0f);
1446            break;
1447        case PLAYBACK_RATE_AUDIO_MODE_STRETCH:
1448            params.setSpeed(rate).setPitch(1.0f)
1449                    .setAudioFallbackMode(params.AUDIO_FALLBACK_MODE_FAIL);
1450            break;
1451        case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE:
1452            params.setSpeed(rate).setPitch(rate);
1453            break;
1454        default:
1455            final String msg = "Audio playback mode " + audioMode + " is not supported";
1456            throw new IllegalArgumentException(msg);
1457        }
1458        return params;
1459    }
1460
1461    /**
1462     * Sets playback rate using {@link PlaybackParams}. The object sets its internal
1463     * PlaybackParams to the input, except that the object remembers previous speed
1464     * when input speed is zero. This allows the object to resume at previous speed
1465     * when start() is called. Calling it before the object is prepared does not change
1466     * the object state. After the object is prepared, calling it with zero speed is
1467     * equivalent to calling pause(). After the object is prepared, calling it with
1468     * non-zero speed is equivalent to calling start().
1469     *
1470     * @param params the playback params.
1471     *
1472     * @throws IllegalStateException if the internal player engine has not been
1473     * initialized or has been released.
1474     * @throws IllegalArgumentException if params is not supported.
1475     */
1476    public native void setPlaybackParams(@NonNull PlaybackParams params);
1477
1478    /**
1479     * Gets the playback params, containing the current playback rate.
1480     *
1481     * @return the playback params.
1482     * @throws IllegalStateException if the internal player engine has not been
1483     * initialized.
1484     */
1485    @NonNull
1486    public native PlaybackParams getPlaybackParams();
1487
1488    /**
1489     * Sets A/V sync mode.
1490     *
1491     * @param params the A/V sync params to apply
1492     *
1493     * @throws IllegalStateException if the internal player engine has not been
1494     * initialized.
1495     * @throws IllegalArgumentException if params are not supported.
1496     */
1497    public native void setSyncParams(@NonNull SyncParams params);
1498
1499    /**
1500     * Gets the A/V sync mode.
1501     *
1502     * @return the A/V sync params
1503     *
1504     * @throws IllegalStateException if the internal player engine has not been
1505     * initialized.
1506     */
1507    @NonNull
1508    public native SyncParams getSyncParams();
1509
1510    /**
1511     * Seek modes used in method seekTo(int, int) to move media position
1512     * to a specified location.
1513     *
1514     * Do not change these mode values without updating their counterparts
1515     * in include/media/IMediaSource.h!
1516     */
1517    /**
1518     * This mode is used with {@link #seekTo(int, int)} to move media position to
1519     * a sync (or key) frame associated with a data source that is located
1520     * right before or at the given time.
1521     *
1522     * @see #seekTo(int, int)
1523     */
1524    public static final int SEEK_PREVIOUS_SYNC    = 0x00;
1525    /**
1526     * This mode is used with {@link #seekTo(int, int)} to move media position to
1527     * a sync (or key) frame associated with a data source that is located
1528     * right after or at the given time.
1529     *
1530     * @see #seekTo(int, int)
1531     */
1532    public static final int SEEK_NEXT_SYNC        = 0x01;
1533    /**
1534     * This mode is used with {@link #seekTo(int, int)} to move media position to
1535     * a sync (or key) frame associated with a data source that is located
1536     * closest to (in time) or at the given time.
1537     *
1538     * @see #seekTo(int, int)
1539     */
1540    public static final int SEEK_CLOSEST_SYNC     = 0x02;
1541    /**
1542     * This mode is used with {@link #seekTo(int, int)} to move media position to
1543     * a frame (not necessarily a key frame) associated with a data source that
1544     * is located closest to or at the given time.
1545     *
1546     * @see #seekTo(int, int)
1547     */
1548    public static final int SEEK_CLOSEST          = 0x03;
1549
1550    /** @hide */
1551    @IntDef(
1552        value = {
1553            SEEK_PREVIOUS_SYNC,
1554            SEEK_NEXT_SYNC,
1555            SEEK_CLOSEST_SYNC,
1556            SEEK_CLOSEST,
1557    })
1558    @Retention(RetentionPolicy.SOURCE)
1559    public @interface SeekMode {}
1560
1561    private native final void _seekTo(int msec, int mode);
1562
1563    /**
1564     * Moves the media to specified time position by considering the given mode.
1565     * <p>
1566     * When seekTo is finished, the user will be notified via OnSeekComplete supplied by the user.
1567     * There is at most one active seekTo processed at any time. If there is a to-be-completed
1568     * seekTo, new seekTo requests will be queued in such a way that only the last request
1569     * is kept. When current seekTo is completed, the queued request will be processed if
1570     * that request is different from just-finished seekTo operation, i.e., the requested
1571     * position or mode is different.
1572     *
1573     * @param msec the offset in milliseconds from the start to seek to.
1574     * When seeking to the given time position, there is no guarantee that the data source
1575     * has a frame located at the position. When this happens, a frame nearby will be rendered.
1576     * If msec is negative, time position zero will be used.
1577     * If msec is larger than duration, duration will be used.
1578     * @param mode the mode indicating where exactly to seek to.
1579     * Use {@link #SEEK_PREVIOUS_SYNC} if one wants to seek to a sync frame
1580     * that has a timestamp earlier than or the same as msec. Use
1581     * {@link #SEEK_NEXT_SYNC} if one wants to seek to a sync frame
1582     * that has a timestamp later than or the same as msec. Use
1583     * {@link #SEEK_CLOSEST_SYNC} if one wants to seek to a sync frame
1584     * that has a timestamp closest to or the same as msec. Use
1585     * {@link #SEEK_CLOSEST} if one wants to seek to a frame that may
1586     * or may not be a sync frame but is closest to or the same as msec.
1587     * {@link #SEEK_CLOSEST} often has larger performance overhead compared
1588     * to the other options if there is no sync frame located at msec.
1589     * @throws IllegalStateException if the internal player engine has not been
1590     * initialized
1591     * @throws IllegalArgumentException if the mode is invalid.
1592     */
1593    public void seekTo(int msec, @SeekMode int mode) throws IllegalStateException {
1594        if (mode < SEEK_PREVIOUS_SYNC || mode > SEEK_CLOSEST) {
1595            final String msg = "Illegal seek mode: " + mode;
1596            throw new IllegalArgumentException(msg);
1597        }
1598        _seekTo(msec, mode);
1599    }
1600
1601    /**
1602     * Seeks to specified time position.
1603     * Same as {@link #seekTo(int, int)} with {@code mode = SEEK_PREVIOUS_SYNC}.
1604     *
1605     * @param msec the offset in milliseconds from the start to seek to
1606     * @throws IllegalStateException if the internal player engine has not been
1607     * initialized
1608     */
1609    public void seekTo(int msec) throws IllegalStateException {
1610        seekTo(msec, SEEK_PREVIOUS_SYNC /* mode */);
1611    }
1612
1613    /**
1614     * Get current playback position as a {@link MediaTimestamp}.
1615     * <p>
1616     * The MediaTimestamp represents how the media time correlates to the system time in
1617     * a linear fashion using an anchor and a clock rate. During regular playback, the media
1618     * time moves fairly constantly (though the anchor frame may be rebased to a current
1619     * system time, the linear correlation stays steady). Therefore, this method does not
1620     * need to be called often.
1621     * <p>
1622     * To help users get current playback position, this method always anchors the timestamp
1623     * to the current {@link System#nanoTime system time}, so
1624     * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
1625     *
1626     * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
1627     *         is available, e.g. because the media player has not been initialized.
1628     *
1629     * @see MediaTimestamp
1630     */
1631    @Nullable
1632    public MediaTimestamp getTimestamp()
1633    {
1634        try {
1635            // TODO: get the timestamp from native side
1636            return new MediaTimestamp(
1637                    getCurrentPosition() * 1000L,
1638                    System.nanoTime(),
1639                    isPlaying() ? getPlaybackParams().getSpeed() : 0.f);
1640        } catch (IllegalStateException e) {
1641            return null;
1642        }
1643    }
1644
1645    /**
1646     * Gets the current playback position.
1647     *
1648     * @return the current position in milliseconds
1649     */
1650    public native int getCurrentPosition();
1651
1652    /**
1653     * Gets the duration of the file.
1654     *
1655     * @return the duration in milliseconds, if no duration is available
1656     *         (for example, if streaming live content), -1 is returned.
1657     */
1658    public native int getDuration();
1659
1660    /**
1661     * Gets the media metadata.
1662     *
1663     * @param update_only controls whether the full set of available
1664     * metadata is returned or just the set that changed since the
1665     * last call. See {@see #METADATA_UPDATE_ONLY} and {@see
1666     * #METADATA_ALL}.
1667     *
1668     * @param apply_filter if true only metadata that matches the
1669     * filter is returned. See {@see #APPLY_METADATA_FILTER} and {@see
1670     * #BYPASS_METADATA_FILTER}.
1671     *
1672     * @return The metadata, possibly empty. null if an error occured.
1673     // FIXME: unhide.
1674     * {@hide}
1675     */
1676    public Metadata getMetadata(final boolean update_only,
1677                                final boolean apply_filter) {
1678        Parcel reply = Parcel.obtain();
1679        Metadata data = new Metadata();
1680
1681        if (!native_getMetadata(update_only, apply_filter, reply)) {
1682            reply.recycle();
1683            return null;
1684        }
1685
1686        // Metadata takes over the parcel, don't recycle it unless
1687        // there is an error.
1688        if (!data.parse(reply)) {
1689            reply.recycle();
1690            return null;
1691        }
1692        return data;
1693    }
1694
1695    /**
1696     * Set a filter for the metadata update notification and update
1697     * retrieval. The caller provides 2 set of metadata keys, allowed
1698     * and blocked. The blocked set always takes precedence over the
1699     * allowed one.
1700     * Metadata.MATCH_ALL and Metadata.MATCH_NONE are 2 sets available as
1701     * shorthands to allow/block all or no metadata.
1702     *
1703     * By default, there is no filter set.
1704     *
1705     * @param allow Is the set of metadata the client is interested
1706     *              in receiving new notifications for.
1707     * @param block Is the set of metadata the client is not interested
1708     *              in receiving new notifications for.
1709     * @return The call status code.
1710     *
1711     // FIXME: unhide.
1712     * {@hide}
1713     */
1714    public int setMetadataFilter(Set<Integer> allow, Set<Integer> block) {
1715        // Do our serialization manually instead of calling
1716        // Parcel.writeArray since the sets are made of the same type
1717        // we avoid paying the price of calling writeValue (used by
1718        // writeArray) which burns an extra int per element to encode
1719        // the type.
1720        Parcel request =  newRequest();
1721
1722        // The parcel starts already with an interface token. There
1723        // are 2 filters. Each one starts with a 4bytes number to
1724        // store the len followed by a number of int (4 bytes as well)
1725        // representing the metadata type.
1726        int capacity = request.dataSize() + 4 * (1 + allow.size() + 1 + block.size());
1727
1728        if (request.dataCapacity() < capacity) {
1729            request.setDataCapacity(capacity);
1730        }
1731
1732        request.writeInt(allow.size());
1733        for(Integer t: allow) {
1734            request.writeInt(t);
1735        }
1736        request.writeInt(block.size());
1737        for(Integer t: block) {
1738            request.writeInt(t);
1739        }
1740        return native_setMetadataFilter(request);
1741    }
1742
1743    /**
1744     * Set the MediaPlayer to start when this MediaPlayer finishes playback
1745     * (i.e. reaches the end of the stream).
1746     * The media framework will attempt to transition from this player to
1747     * the next as seamlessly as possible. The next player can be set at
1748     * any time before completion, but shall be after setDataSource has been
1749     * called successfully. The next player must be prepared by the
1750     * app, and the application should not call start() on it.
1751     * The next MediaPlayer must be different from 'this'. An exception
1752     * will be thrown if next == this.
1753     * The application may call setNextMediaPlayer(null) to indicate no
1754     * next player should be started at the end of playback.
1755     * If the current player is looping, it will keep looping and the next
1756     * player will not be started.
1757     *
1758     * @param next the player to start after this one completes playback.
1759     *
1760     */
1761    public native void setNextMediaPlayer(MediaPlayer next);
1762
1763    /**
1764     * Releases resources associated with this MediaPlayer object.
1765     * It is considered good practice to call this method when you're
1766     * done using the MediaPlayer. In particular, whenever an Activity
1767     * of an application is paused (its onPause() method is called),
1768     * or stopped (its onStop() method is called), this method should be
1769     * invoked to release the MediaPlayer object, unless the application
1770     * has a special need to keep the object around. In addition to
1771     * unnecessary resources (such as memory and instances of codecs)
1772     * being held, failure to call this method immediately if a
1773     * MediaPlayer object is no longer needed may also lead to
1774     * continuous battery consumption for mobile devices, and playback
1775     * failure for other applications if no multiple instances of the
1776     * same codec are supported on a device. Even if multiple instances
1777     * of the same codec are supported, some performance degradation
1778     * may be expected when unnecessary multiple instances are used
1779     * at the same time.
1780     */
1781    public void release() {
1782        baseRelease();
1783        stayAwake(false);
1784        updateSurfaceScreenOn();
1785        mOnPreparedListener = null;
1786        mOnBufferingUpdateListener = null;
1787        mOnCompletionListener = null;
1788        mOnSeekCompleteListener = null;
1789        mOnErrorListener = null;
1790        mOnInfoListener = null;
1791        mOnVideoSizeChangedListener = null;
1792        mOnTimedTextListener = null;
1793        if (mTimeProvider != null) {
1794            mTimeProvider.close();
1795            mTimeProvider = null;
1796        }
1797        mOnSubtitleDataListener = null;
1798        _release();
1799    }
1800
1801    private native void _release();
1802
1803    /**
1804     * Resets the MediaPlayer to its uninitialized state. After calling
1805     * this method, you will have to initialize it again by setting the
1806     * data source and calling prepare().
1807     */
1808    public void reset() {
1809        mSelectedSubtitleTrackIndex = -1;
1810        synchronized(mOpenSubtitleSources) {
1811            for (final InputStream is: mOpenSubtitleSources) {
1812                try {
1813                    is.close();
1814                } catch (IOException e) {
1815                }
1816            }
1817            mOpenSubtitleSources.clear();
1818        }
1819        if (mSubtitleController != null) {
1820            mSubtitleController.reset();
1821        }
1822        if (mTimeProvider != null) {
1823            mTimeProvider.close();
1824            mTimeProvider = null;
1825        }
1826
1827        stayAwake(false);
1828        _reset();
1829        // make sure none of the listeners get called anymore
1830        if (mEventHandler != null) {
1831            mEventHandler.removeCallbacksAndMessages(null);
1832        }
1833
1834        synchronized (mIndexTrackPairs) {
1835            mIndexTrackPairs.clear();
1836            mInbandTrackIndices.clear();
1837        };
1838    }
1839
1840    private native void _reset();
1841
1842    /**
1843     * Sets the audio stream type for this MediaPlayer. See {@link AudioManager}
1844     * for a list of stream types. Must call this method before prepare() or
1845     * prepareAsync() in order for the target stream type to become effective
1846     * thereafter.
1847     *
1848     * @param streamtype the audio stream type
1849     * @deprecated use {@link #setAudioAttributes(AudioAttributes)}
1850     * @see android.media.AudioManager
1851     */
1852    public void setAudioStreamType(int streamtype) {
1853        deprecateStreamTypeForPlayback(streamtype, "MediaPlayer", "setAudioStreamType()");
1854        baseUpdateAudioAttributes(
1855                new AudioAttributes.Builder().setInternalLegacyStreamType(streamtype).build());
1856        _setAudioStreamType(streamtype);
1857        mStreamType = streamtype;
1858    }
1859
1860    private native void _setAudioStreamType(int streamtype);
1861
1862    // Keep KEY_PARAMETER_* in sync with include/media/mediaplayer.h
1863    private final static int KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400;
1864    /**
1865     * Sets the parameter indicated by key.
1866     * @param key key indicates the parameter to be set.
1867     * @param value value of the parameter to be set.
1868     * @return true if the parameter is set successfully, false otherwise
1869     * {@hide}
1870     */
1871    private native boolean setParameter(int key, Parcel value);
1872
1873    /**
1874     * Sets the audio attributes for this MediaPlayer.
1875     * See {@link AudioAttributes} for how to build and configure an instance of this class.
1876     * You must call this method before {@link #prepare()} or {@link #prepareAsync()} in order
1877     * for the audio attributes to become effective thereafter.
1878     * @param attributes a non-null set of audio attributes
1879     */
1880    public void setAudioAttributes(AudioAttributes attributes) throws IllegalArgumentException {
1881        if (attributes == null) {
1882            final String msg = "Cannot set AudioAttributes to null";
1883            throw new IllegalArgumentException(msg);
1884        }
1885        baseUpdateAudioAttributes(attributes);
1886        mUsage = attributes.getUsage();
1887        mBypassInterruptionPolicy = (attributes.getAllFlags()
1888                & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0;
1889        Parcel pattributes = Parcel.obtain();
1890        attributes.writeToParcel(pattributes, AudioAttributes.FLATTEN_TAGS);
1891        setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, pattributes);
1892        pattributes.recycle();
1893    }
1894
1895    /**
1896     * Sets the player to be looping or non-looping.
1897     *
1898     * @param looping whether to loop or not
1899     */
1900    public native void setLooping(boolean looping);
1901
1902    /**
1903     * Checks whether the MediaPlayer is looping or non-looping.
1904     *
1905     * @return true if the MediaPlayer is currently looping, false otherwise
1906     */
1907    public native boolean isLooping();
1908
1909    /**
1910     * Sets the volume on this player.
1911     * This API is recommended for balancing the output of audio streams
1912     * within an application. Unless you are writing an application to
1913     * control user settings, this API should be used in preference to
1914     * {@link AudioManager#setStreamVolume(int, int, int)} which sets the volume of ALL streams of
1915     * a particular type. Note that the passed volume values are raw scalars in range 0.0 to 1.0.
1916     * UI controls should be scaled logarithmically.
1917     *
1918     * @param leftVolume left volume scalar
1919     * @param rightVolume right volume scalar
1920     */
1921    /*
1922     * FIXME: Merge this into javadoc comment above when setVolume(float) is not @hide.
1923     * The single parameter form below is preferred if the channel volumes don't need
1924     * to be set independently.
1925     */
1926    public void setVolume(float leftVolume, float rightVolume) {
1927        baseSetVolume(leftVolume, rightVolume);
1928    }
1929
1930    @Override
1931    void playerSetVolume(boolean muting, float leftVolume, float rightVolume) {
1932        _setVolume(muting ? 0.0f : leftVolume, muting ? 0.0f : rightVolume);
1933    }
1934
1935    private native void _setVolume(float leftVolume, float rightVolume);
1936
1937    /**
1938     * Similar, excepts sets volume of all channels to same value.
1939     * @hide
1940     */
1941    public void setVolume(float volume) {
1942        setVolume(volume, volume);
1943    }
1944
1945    /**
1946     * Sets the audio session ID.
1947     *
1948     * @param sessionId the audio session ID.
1949     * The audio session ID is a system wide unique identifier for the audio stream played by
1950     * this MediaPlayer instance.
1951     * The primary use of the audio session ID  is to associate audio effects to a particular
1952     * instance of MediaPlayer: if an audio session ID is provided when creating an audio effect,
1953     * this effect will be applied only to the audio content of media players within the same
1954     * audio session and not to the output mix.
1955     * When created, a MediaPlayer instance automatically generates its own audio session ID.
1956     * However, it is possible to force this player to be part of an already existing audio session
1957     * by calling this method.
1958     * This method must be called before one of the overloaded <code> setDataSource </code> methods.
1959     * @throws IllegalStateException if it is called in an invalid state
1960     */
1961    public native void setAudioSessionId(int sessionId)  throws IllegalArgumentException, IllegalStateException;
1962
1963    /**
1964     * Returns the audio session ID.
1965     *
1966     * @return the audio session ID. {@see #setAudioSessionId(int)}
1967     * Note that the audio session ID is 0 only if a problem occured when the MediaPlayer was contructed.
1968     */
1969    public native int getAudioSessionId();
1970
1971    /**
1972     * Attaches an auxiliary effect to the player. A typical auxiliary effect is a reverberation
1973     * effect which can be applied on any sound source that directs a certain amount of its
1974     * energy to this effect. This amount is defined by setAuxEffectSendLevel().
1975     * See {@link #setAuxEffectSendLevel(float)}.
1976     * <p>After creating an auxiliary effect (e.g.
1977     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
1978     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling this method
1979     * to attach the player to the effect.
1980     * <p>To detach the effect from the player, call this method with a null effect id.
1981     * <p>This method must be called after one of the overloaded <code> setDataSource </code>
1982     * methods.
1983     * @param effectId system wide unique id of the effect to attach
1984     */
1985    public native void attachAuxEffect(int effectId);
1986
1987
1988    /**
1989     * Sets the send level of the player to the attached auxiliary effect.
1990     * See {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0.
1991     * <p>By default the send level is 0, so even if an effect is attached to the player
1992     * this method must be called for the effect to be applied.
1993     * <p>Note that the passed level value is a raw scalar. UI controls should be scaled
1994     * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
1995     * so an appropriate conversion from linear UI input x to level is:
1996     * x == 0 -> level = 0
1997     * 0 < x <= R -> level = 10^(72*(x-R)/20/R)
1998     * @param level send level scalar
1999     */
2000    public void setAuxEffectSendLevel(float level) {
2001        baseSetAuxEffectSendLevel(level);
2002    }
2003
2004    @Override
2005    int playerSetAuxEffectSendLevel(boolean muting, float level) {
2006        _setAuxEffectSendLevel(muting ? 0.0f : level);
2007        return AudioSystem.SUCCESS;
2008    }
2009
2010    private native void _setAuxEffectSendLevel(float level);
2011
2012    /*
2013     * @param request Parcel destinated to the media player. The
2014     *                Interface token must be set to the IMediaPlayer
2015     *                one to be routed correctly through the system.
2016     * @param reply[out] Parcel that will contain the reply.
2017     * @return The status code.
2018     */
2019    private native final int native_invoke(Parcel request, Parcel reply);
2020
2021
2022    /*
2023     * @param update_only If true fetch only the set of metadata that have
2024     *                    changed since the last invocation of getMetadata.
2025     *                    The set is built using the unfiltered
2026     *                    notifications the native player sent to the
2027     *                    MediaPlayerService during that period of
2028     *                    time. If false, all the metadatas are considered.
2029     * @param apply_filter  If true, once the metadata set has been built based on
2030     *                     the value update_only, the current filter is applied.
2031     * @param reply[out] On return contains the serialized
2032     *                   metadata. Valid only if the call was successful.
2033     * @return The status code.
2034     */
2035    private native final boolean native_getMetadata(boolean update_only,
2036                                                    boolean apply_filter,
2037                                                    Parcel reply);
2038
2039    /*
2040     * @param request Parcel with the 2 serialized lists of allowed
2041     *                metadata types followed by the one to be
2042     *                dropped. Each list starts with an integer
2043     *                indicating the number of metadata type elements.
2044     * @return The status code.
2045     */
2046    private native final int native_setMetadataFilter(Parcel request);
2047
2048    private static native final void native_init();
2049    private native final void native_setup(Object mediaplayer_this);
2050    private native final void native_finalize();
2051
2052    /**
2053     * Class for MediaPlayer to return each audio/video/subtitle track's metadata.
2054     *
2055     * @see android.media.MediaPlayer#getTrackInfo
2056     */
2057    static public class TrackInfo implements Parcelable {
2058        /**
2059         * Gets the track type.
2060         * @return TrackType which indicates if the track is video, audio, timed text.
2061         */
2062        public int getTrackType() {
2063            return mTrackType;
2064        }
2065
2066        /**
2067         * Gets the language code of the track.
2068         * @return a language code in either way of ISO-639-1 or ISO-639-2.
2069         * When the language is unknown or could not be determined,
2070         * ISO-639-2 language code, "und", is returned.
2071         */
2072        public String getLanguage() {
2073            String language = mFormat.getString(MediaFormat.KEY_LANGUAGE);
2074            return language == null ? "und" : language;
2075        }
2076
2077        /**
2078         * Gets the {@link MediaFormat} of the track.  If the format is
2079         * unknown or could not be determined, null is returned.
2080         */
2081        public MediaFormat getFormat() {
2082            if (mTrackType == MEDIA_TRACK_TYPE_TIMEDTEXT
2083                    || mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
2084                return mFormat;
2085            }
2086            return null;
2087        }
2088
2089        public static final int MEDIA_TRACK_TYPE_UNKNOWN = 0;
2090        public static final int MEDIA_TRACK_TYPE_VIDEO = 1;
2091        public static final int MEDIA_TRACK_TYPE_AUDIO = 2;
2092        public static final int MEDIA_TRACK_TYPE_TIMEDTEXT = 3;
2093        public static final int MEDIA_TRACK_TYPE_SUBTITLE = 4;
2094        public static final int MEDIA_TRACK_TYPE_METADATA = 5;
2095
2096        final int mTrackType;
2097        final MediaFormat mFormat;
2098
2099        TrackInfo(Parcel in) {
2100            mTrackType = in.readInt();
2101            // TODO: parcel in the full MediaFormat; currently we are using createSubtitleFormat
2102            // even for audio/video tracks, meaning we only set the mime and language.
2103            String mime = in.readString();
2104            String language = in.readString();
2105            mFormat = MediaFormat.createSubtitleFormat(mime, language);
2106
2107            if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
2108                mFormat.setInteger(MediaFormat.KEY_IS_AUTOSELECT, in.readInt());
2109                mFormat.setInteger(MediaFormat.KEY_IS_DEFAULT, in.readInt());
2110                mFormat.setInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE, in.readInt());
2111            }
2112        }
2113
2114        /** @hide */
2115        TrackInfo(int type, MediaFormat format) {
2116            mTrackType = type;
2117            mFormat = format;
2118        }
2119
2120        /**
2121         * {@inheritDoc}
2122         */
2123        @Override
2124        public int describeContents() {
2125            return 0;
2126        }
2127
2128        /**
2129         * {@inheritDoc}
2130         */
2131        @Override
2132        public void writeToParcel(Parcel dest, int flags) {
2133            dest.writeInt(mTrackType);
2134            dest.writeString(getLanguage());
2135
2136            if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
2137                dest.writeString(mFormat.getString(MediaFormat.KEY_MIME));
2138                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_AUTOSELECT));
2139                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_DEFAULT));
2140                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE));
2141            }
2142        }
2143
2144        @Override
2145        public String toString() {
2146            StringBuilder out = new StringBuilder(128);
2147            out.append(getClass().getName());
2148            out.append('{');
2149            switch (mTrackType) {
2150            case MEDIA_TRACK_TYPE_VIDEO:
2151                out.append("VIDEO");
2152                break;
2153            case MEDIA_TRACK_TYPE_AUDIO:
2154                out.append("AUDIO");
2155                break;
2156            case MEDIA_TRACK_TYPE_TIMEDTEXT:
2157                out.append("TIMEDTEXT");
2158                break;
2159            case MEDIA_TRACK_TYPE_SUBTITLE:
2160                out.append("SUBTITLE");
2161                break;
2162            default:
2163                out.append("UNKNOWN");
2164                break;
2165            }
2166            out.append(", " + mFormat.toString());
2167            out.append("}");
2168            return out.toString();
2169        }
2170
2171        /**
2172         * Used to read a TrackInfo from a Parcel.
2173         */
2174        static final Parcelable.Creator<TrackInfo> CREATOR
2175                = new Parcelable.Creator<TrackInfo>() {
2176                    @Override
2177                    public TrackInfo createFromParcel(Parcel in) {
2178                        return new TrackInfo(in);
2179                    }
2180
2181                    @Override
2182                    public TrackInfo[] newArray(int size) {
2183                        return new TrackInfo[size];
2184                    }
2185                };
2186
2187    };
2188
2189    // We would like domain specific classes with more informative names than the `first` and `second`
2190    // in generic Pair, but we would also like to avoid creating new/trivial classes. As a compromise
2191    // we document the meanings of `first` and `second` here:
2192    //
2193    // Pair.first - inband track index; non-null iff representing an inband track.
2194    // Pair.second - a SubtitleTrack registered with mSubtitleController; non-null iff representing
2195    //               an inband subtitle track or any out-of-band track (subtitle or timedtext).
2196    private Vector<Pair<Integer, SubtitleTrack>> mIndexTrackPairs = new Vector<>();
2197    private BitSet mInbandTrackIndices = new BitSet();
2198
2199    /**
2200     * Returns an array of track information.
2201     *
2202     * @return Array of track info. The total number of tracks is the array length.
2203     * Must be called again if an external timed text source has been added after any of the
2204     * addTimedTextSource methods are called.
2205     * @throws IllegalStateException if it is called in an invalid state.
2206     */
2207    public TrackInfo[] getTrackInfo() throws IllegalStateException {
2208        TrackInfo trackInfo[] = getInbandTrackInfo();
2209        // add out-of-band tracks
2210        synchronized (mIndexTrackPairs) {
2211            TrackInfo allTrackInfo[] = new TrackInfo[mIndexTrackPairs.size()];
2212            for (int i = 0; i < allTrackInfo.length; i++) {
2213                Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
2214                if (p.first != null) {
2215                    // inband track
2216                    allTrackInfo[i] = trackInfo[p.first];
2217                } else {
2218                    SubtitleTrack track = p.second;
2219                    allTrackInfo[i] = new TrackInfo(track.getTrackType(), track.getFormat());
2220                }
2221            }
2222            return allTrackInfo;
2223        }
2224    }
2225
2226    private TrackInfo[] getInbandTrackInfo() throws IllegalStateException {
2227        Parcel request = Parcel.obtain();
2228        Parcel reply = Parcel.obtain();
2229        try {
2230            request.writeInterfaceToken(IMEDIA_PLAYER);
2231            request.writeInt(INVOKE_ID_GET_TRACK_INFO);
2232            invoke(request, reply);
2233            TrackInfo trackInfo[] = reply.createTypedArray(TrackInfo.CREATOR);
2234            return trackInfo;
2235        } finally {
2236            request.recycle();
2237            reply.recycle();
2238        }
2239    }
2240
2241    /* Do not change these values without updating their counterparts
2242     * in include/media/stagefright/MediaDefs.h and media/libstagefright/MediaDefs.cpp!
2243     */
2244    /**
2245     * MIME type for SubRip (SRT) container. Used in addTimedTextSource APIs.
2246     */
2247    public static final String MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";
2248
2249    /**
2250     * MIME type for WebVTT subtitle data.
2251     * @hide
2252     */
2253    public static final String MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";
2254
2255    /**
2256     * MIME type for CEA-608 closed caption data.
2257     * @hide
2258     */
2259    public static final String MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608";
2260
2261    /**
2262     * MIME type for CEA-708 closed caption data.
2263     * @hide
2264     */
2265    public static final String MEDIA_MIMETYPE_TEXT_CEA_708 = "text/cea-708";
2266
2267    /*
2268     * A helper function to check if the mime type is supported by media framework.
2269     */
2270    private static boolean availableMimeTypeForExternalSource(String mimeType) {
2271        if (MEDIA_MIMETYPE_TEXT_SUBRIP.equals(mimeType)) {
2272            return true;
2273        }
2274        return false;
2275    }
2276
2277    private SubtitleController mSubtitleController;
2278
2279    /** @hide */
2280    public void setSubtitleAnchor(
2281            SubtitleController controller,
2282            SubtitleController.Anchor anchor) {
2283        // TODO: create SubtitleController in MediaPlayer
2284        mSubtitleController = controller;
2285        mSubtitleController.setAnchor(anchor);
2286    }
2287
2288    /**
2289     * The private version of setSubtitleAnchor is used internally to set mSubtitleController if
2290     * necessary when clients don't provide their own SubtitleControllers using the public version
2291     * {@link #setSubtitleAnchor(SubtitleController, Anchor)} (e.g. {@link VideoView} provides one).
2292     */
2293    private synchronized void setSubtitleAnchor() {
2294        if ((mSubtitleController == null) && (ActivityThread.currentApplication() != null)) {
2295            final HandlerThread thread = new HandlerThread("SetSubtitleAnchorThread");
2296            thread.start();
2297            Handler handler = new Handler(thread.getLooper());
2298            handler.post(new Runnable() {
2299                @Override
2300                public void run() {
2301                    Context context = ActivityThread.currentApplication();
2302                    mSubtitleController = new SubtitleController(context, mTimeProvider, MediaPlayer.this);
2303                    mSubtitleController.setAnchor(new Anchor() {
2304                        @Override
2305                        public void setSubtitleWidget(RenderingWidget subtitleWidget) {
2306                        }
2307
2308                        @Override
2309                        public Looper getSubtitleLooper() {
2310                            return Looper.getMainLooper();
2311                        }
2312                    });
2313                    thread.getLooper().quitSafely();
2314                }
2315            });
2316            try {
2317                thread.join();
2318            } catch (InterruptedException e) {
2319                Thread.currentThread().interrupt();
2320                Log.w(TAG, "failed to join SetSubtitleAnchorThread");
2321            }
2322        }
2323    }
2324
2325    private int mSelectedSubtitleTrackIndex = -1;
2326    private Vector<InputStream> mOpenSubtitleSources;
2327
2328    private OnSubtitleDataListener mSubtitleDataListener = new OnSubtitleDataListener() {
2329        @Override
2330        public void onSubtitleData(MediaPlayer mp, SubtitleData data) {
2331            int index = data.getTrackIndex();
2332            synchronized (mIndexTrackPairs) {
2333                for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
2334                    if (p.first != null && p.first == index && p.second != null) {
2335                        // inband subtitle track that owns data
2336                        SubtitleTrack track = p.second;
2337                        track.onData(data);
2338                    }
2339                }
2340            }
2341        }
2342    };
2343
2344    /** @hide */
2345    @Override
2346    public void onSubtitleTrackSelected(SubtitleTrack track) {
2347        if (mSelectedSubtitleTrackIndex >= 0) {
2348            try {
2349                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, false);
2350            } catch (IllegalStateException e) {
2351            }
2352            mSelectedSubtitleTrackIndex = -1;
2353        }
2354        setOnSubtitleDataListener(null);
2355        if (track == null) {
2356            return;
2357        }
2358
2359        synchronized (mIndexTrackPairs) {
2360            for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
2361                if (p.first != null && p.second == track) {
2362                    // inband subtitle track that is selected
2363                    mSelectedSubtitleTrackIndex = p.first;
2364                    break;
2365                }
2366            }
2367        }
2368
2369        if (mSelectedSubtitleTrackIndex >= 0) {
2370            try {
2371                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, true);
2372            } catch (IllegalStateException e) {
2373            }
2374            setOnSubtitleDataListener(mSubtitleDataListener);
2375        }
2376        // no need to select out-of-band tracks
2377    }
2378
2379    /** @hide */
2380    public void addSubtitleSource(InputStream is, MediaFormat format)
2381            throws IllegalStateException
2382    {
2383        final InputStream fIs = is;
2384        final MediaFormat fFormat = format;
2385
2386        if (is != null) {
2387            // Ensure all input streams are closed.  It is also a handy
2388            // way to implement timeouts in the future.
2389            synchronized(mOpenSubtitleSources) {
2390                mOpenSubtitleSources.add(is);
2391            }
2392        } else {
2393            Log.w(TAG, "addSubtitleSource called with null InputStream");
2394        }
2395
2396        getMediaTimeProvider();
2397
2398        // process each subtitle in its own thread
2399        final HandlerThread thread = new HandlerThread("SubtitleReadThread",
2400              Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
2401        thread.start();
2402        Handler handler = new Handler(thread.getLooper());
2403        handler.post(new Runnable() {
2404            private int addTrack() {
2405                if (fIs == null || mSubtitleController == null) {
2406                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
2407                }
2408
2409                SubtitleTrack track = mSubtitleController.addTrack(fFormat);
2410                if (track == null) {
2411                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
2412                }
2413
2414                // TODO: do the conversion in the subtitle track
2415                Scanner scanner = new Scanner(fIs, "UTF-8");
2416                String contents = scanner.useDelimiter("\\A").next();
2417                synchronized(mOpenSubtitleSources) {
2418                    mOpenSubtitleSources.remove(fIs);
2419                }
2420                scanner.close();
2421                synchronized (mIndexTrackPairs) {
2422                    mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
2423                }
2424                Handler h = mTimeProvider.mEventHandler;
2425                int what = TimeProvider.NOTIFY;
2426                int arg1 = TimeProvider.NOTIFY_TRACK_DATA;
2427                Pair<SubtitleTrack, byte[]> trackData = Pair.create(track, contents.getBytes());
2428                Message m = h.obtainMessage(what, arg1, 0, trackData);
2429                h.sendMessage(m);
2430                return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
2431            }
2432
2433            public void run() {
2434                int res = addTrack();
2435                if (mEventHandler != null) {
2436                    Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
2437                    mEventHandler.sendMessage(m);
2438                }
2439                thread.getLooper().quitSafely();
2440            }
2441        });
2442    }
2443
2444    private void scanInternalSubtitleTracks() {
2445        setSubtitleAnchor();
2446
2447        populateInbandTracks();
2448
2449        if (mSubtitleController != null) {
2450            mSubtitleController.selectDefaultTrack();
2451        }
2452    }
2453
2454    private void populateInbandTracks() {
2455        TrackInfo[] tracks = getInbandTrackInfo();
2456        synchronized (mIndexTrackPairs) {
2457            for (int i = 0; i < tracks.length; i++) {
2458                if (mInbandTrackIndices.get(i)) {
2459                    continue;
2460                } else {
2461                    mInbandTrackIndices.set(i);
2462                }
2463
2464                // newly appeared inband track
2465                if (tracks[i].getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
2466                    SubtitleTrack track = mSubtitleController.addTrack(
2467                            tracks[i].getFormat());
2468                    mIndexTrackPairs.add(Pair.create(i, track));
2469                } else {
2470                    mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(i, null));
2471                }
2472            }
2473        }
2474    }
2475
2476    /* TODO: Limit the total number of external timed text source to a reasonable number.
2477     */
2478    /**
2479     * Adds an external timed text source file.
2480     *
2481     * Currently supported format is SubRip with the file extension .srt, case insensitive.
2482     * Note that a single external timed text source may contain multiple tracks in it.
2483     * One can find the total number of available tracks using {@link #getTrackInfo()} to see what
2484     * additional tracks become available after this method call.
2485     *
2486     * @param path The file path of external timed text source file.
2487     * @param mimeType The mime type of the file. Must be one of the mime types listed above.
2488     * @throws IOException if the file cannot be accessed or is corrupted.
2489     * @throws IllegalArgumentException if the mimeType is not supported.
2490     * @throws IllegalStateException if called in an invalid state.
2491     */
2492    public void addTimedTextSource(String path, String mimeType)
2493            throws IOException, IllegalArgumentException, IllegalStateException {
2494        if (!availableMimeTypeForExternalSource(mimeType)) {
2495            final String msg = "Illegal mimeType for timed text source: " + mimeType;
2496            throw new IllegalArgumentException(msg);
2497        }
2498
2499        File file = new File(path);
2500        if (file.exists()) {
2501            FileInputStream is = new FileInputStream(file);
2502            FileDescriptor fd = is.getFD();
2503            addTimedTextSource(fd, mimeType);
2504            is.close();
2505        } else {
2506            // We do not support the case where the path is not a file.
2507            throw new IOException(path);
2508        }
2509    }
2510
2511    /**
2512     * Adds an external timed text source file (Uri).
2513     *
2514     * Currently supported format is SubRip with the file extension .srt, case insensitive.
2515     * Note that a single external timed text source may contain multiple tracks in it.
2516     * One can find the total number of available tracks using {@link #getTrackInfo()} to see what
2517     * additional tracks become available after this method call.
2518     *
2519     * @param context the Context to use when resolving the Uri
2520     * @param uri the Content URI of the data you want to play
2521     * @param mimeType The mime type of the file. Must be one of the mime types listed above.
2522     * @throws IOException if the file cannot be accessed or is corrupted.
2523     * @throws IllegalArgumentException if the mimeType is not supported.
2524     * @throws IllegalStateException if called in an invalid state.
2525     */
2526    public void addTimedTextSource(Context context, Uri uri, String mimeType)
2527            throws IOException, IllegalArgumentException, IllegalStateException {
2528        String scheme = uri.getScheme();
2529        if(scheme == null || scheme.equals("file")) {
2530            addTimedTextSource(uri.getPath(), mimeType);
2531            return;
2532        }
2533
2534        AssetFileDescriptor fd = null;
2535        try {
2536            ContentResolver resolver = context.getContentResolver();
2537            fd = resolver.openAssetFileDescriptor(uri, "r");
2538            if (fd == null) {
2539                return;
2540            }
2541            addTimedTextSource(fd.getFileDescriptor(), mimeType);
2542            return;
2543        } catch (SecurityException ex) {
2544        } catch (IOException ex) {
2545        } finally {
2546            if (fd != null) {
2547                fd.close();
2548            }
2549        }
2550    }
2551
2552    /**
2553     * Adds an external timed text source file (FileDescriptor).
2554     *
2555     * It is the caller's responsibility to close the file descriptor.
2556     * It is safe to do so as soon as this call returns.
2557     *
2558     * Currently supported format is SubRip. Note that a single external timed text source may
2559     * contain multiple tracks in it. One can find the total number of available tracks
2560     * using {@link #getTrackInfo()} to see what additional tracks become available
2561     * after this method call.
2562     *
2563     * @param fd the FileDescriptor for the file you want to play
2564     * @param mimeType The mime type of the file. Must be one of the mime types listed above.
2565     * @throws IllegalArgumentException if the mimeType is not supported.
2566     * @throws IllegalStateException if called in an invalid state.
2567     */
2568    public void addTimedTextSource(FileDescriptor fd, String mimeType)
2569            throws IllegalArgumentException, IllegalStateException {
2570        // intentionally less than LONG_MAX
2571        addTimedTextSource(fd, 0, 0x7ffffffffffffffL, mimeType);
2572    }
2573
2574    /**
2575     * Adds an external timed text file (FileDescriptor).
2576     *
2577     * It is the caller's responsibility to close the file descriptor.
2578     * It is safe to do so as soon as this call returns.
2579     *
2580     * Currently supported format is SubRip. Note that a single external timed text source may
2581     * contain multiple tracks in it. One can find the total number of available tracks
2582     * using {@link #getTrackInfo()} to see what additional tracks become available
2583     * after this method call.
2584     *
2585     * @param fd the FileDescriptor for the file you want to play
2586     * @param offset the offset into the file where the data to be played starts, in bytes
2587     * @param length the length in bytes of the data to be played
2588     * @param mime The mime type of the file. Must be one of the mime types listed above.
2589     * @throws IllegalArgumentException if the mimeType is not supported.
2590     * @throws IllegalStateException if called in an invalid state.
2591     */
2592    public void addTimedTextSource(FileDescriptor fd, long offset, long length, String mime)
2593            throws IllegalArgumentException, IllegalStateException {
2594        if (!availableMimeTypeForExternalSource(mime)) {
2595            throw new IllegalArgumentException("Illegal mimeType for timed text source: " + mime);
2596        }
2597
2598        final FileDescriptor dupedFd;
2599        try {
2600            dupedFd = Libcore.os.dup(fd);
2601        } catch (ErrnoException ex) {
2602            Log.e(TAG, ex.getMessage(), ex);
2603            throw new RuntimeException(ex);
2604        }
2605
2606        final MediaFormat fFormat = new MediaFormat();
2607        fFormat.setString(MediaFormat.KEY_MIME, mime);
2608        fFormat.setInteger(MediaFormat.KEY_IS_TIMED_TEXT, 1);
2609
2610        // A MediaPlayer created by a VideoView should already have its mSubtitleController set.
2611        if (mSubtitleController == null) {
2612            setSubtitleAnchor();
2613        }
2614
2615        if (!mSubtitleController.hasRendererFor(fFormat)) {
2616            // test and add not atomic
2617            Context context = ActivityThread.currentApplication();
2618            mSubtitleController.registerRenderer(new SRTRenderer(context, mEventHandler));
2619        }
2620        final SubtitleTrack track = mSubtitleController.addTrack(fFormat);
2621        synchronized (mIndexTrackPairs) {
2622            mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
2623        }
2624
2625        getMediaTimeProvider();
2626
2627        final long offset2 = offset;
2628        final long length2 = length;
2629        final HandlerThread thread = new HandlerThread(
2630                "TimedTextReadThread",
2631                Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
2632        thread.start();
2633        Handler handler = new Handler(thread.getLooper());
2634        handler.post(new Runnable() {
2635            private int addTrack() {
2636                final ByteArrayOutputStream bos = new ByteArrayOutputStream();
2637                try {
2638                    Libcore.os.lseek(dupedFd, offset2, OsConstants.SEEK_SET);
2639                    byte[] buffer = new byte[4096];
2640                    for (long total = 0; total < length2;) {
2641                        int bytesToRead = (int) Math.min(buffer.length, length2 - total);
2642                        int bytes = IoBridge.read(dupedFd, buffer, 0, bytesToRead);
2643                        if (bytes < 0) {
2644                            break;
2645                        } else {
2646                            bos.write(buffer, 0, bytes);
2647                            total += bytes;
2648                        }
2649                    }
2650                    Handler h = mTimeProvider.mEventHandler;
2651                    int what = TimeProvider.NOTIFY;
2652                    int arg1 = TimeProvider.NOTIFY_TRACK_DATA;
2653                    Pair<SubtitleTrack, byte[]> trackData = Pair.create(track, bos.toByteArray());
2654                    Message m = h.obtainMessage(what, arg1, 0, trackData);
2655                    h.sendMessage(m);
2656                    return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
2657                } catch (Exception e) {
2658                    Log.e(TAG, e.getMessage(), e);
2659                    return MEDIA_INFO_TIMED_TEXT_ERROR;
2660                } finally {
2661                    try {
2662                        Libcore.os.close(dupedFd);
2663                    } catch (ErrnoException e) {
2664                        Log.e(TAG, e.getMessage(), e);
2665                    }
2666                }
2667            }
2668
2669            public void run() {
2670                int res = addTrack();
2671                if (mEventHandler != null) {
2672                    Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
2673                    mEventHandler.sendMessage(m);
2674                }
2675                thread.getLooper().quitSafely();
2676            }
2677        });
2678    }
2679
2680    /**
2681     * Returns the index of the audio, video, or subtitle track currently selected for playback,
2682     * The return value is an index into the array returned by {@link #getTrackInfo()}, and can
2683     * be used in calls to {@link #selectTrack(int)} or {@link #deselectTrack(int)}.
2684     *
2685     * @param trackType should be one of {@link TrackInfo#MEDIA_TRACK_TYPE_VIDEO},
2686     * {@link TrackInfo#MEDIA_TRACK_TYPE_AUDIO}, or
2687     * {@link TrackInfo#MEDIA_TRACK_TYPE_SUBTITLE}
2688     * @return index of the audio, video, or subtitle track currently selected for playback;
2689     * a negative integer is returned when there is no selected track for {@code trackType} or
2690     * when {@code trackType} is not one of audio, video, or subtitle.
2691     * @throws IllegalStateException if called after {@link #release()}
2692     *
2693     * @see #getTrackInfo()
2694     * @see #selectTrack(int)
2695     * @see #deselectTrack(int)
2696     */
2697    public int getSelectedTrack(int trackType) throws IllegalStateException {
2698        if (mSubtitleController != null
2699                && (trackType == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE
2700                || trackType == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT)) {
2701            SubtitleTrack subtitleTrack = mSubtitleController.getSelectedTrack();
2702            if (subtitleTrack != null) {
2703                synchronized (mIndexTrackPairs) {
2704                    for (int i = 0; i < mIndexTrackPairs.size(); i++) {
2705                        Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
2706                        if (p.second == subtitleTrack && subtitleTrack.getTrackType() == trackType) {
2707                            return i;
2708                        }
2709                    }
2710                }
2711            }
2712        }
2713
2714        Parcel request = Parcel.obtain();
2715        Parcel reply = Parcel.obtain();
2716        try {
2717            request.writeInterfaceToken(IMEDIA_PLAYER);
2718            request.writeInt(INVOKE_ID_GET_SELECTED_TRACK);
2719            request.writeInt(trackType);
2720            invoke(request, reply);
2721            int inbandTrackIndex = reply.readInt();
2722            synchronized (mIndexTrackPairs) {
2723                for (int i = 0; i < mIndexTrackPairs.size(); i++) {
2724                    Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
2725                    if (p.first != null && p.first == inbandTrackIndex) {
2726                        return i;
2727                    }
2728                }
2729            }
2730            return -1;
2731        } finally {
2732            request.recycle();
2733            reply.recycle();
2734        }
2735    }
2736
2737    /**
2738     * Selects a track.
2739     * <p>
2740     * If a MediaPlayer is in invalid state, it throws an IllegalStateException exception.
2741     * If a MediaPlayer is in <em>Started</em> state, the selected track is presented immediately.
2742     * If a MediaPlayer is not in Started state, it just marks the track to be played.
2743     * </p>
2744     * <p>
2745     * In any valid state, if it is called multiple times on the same type of track (ie. Video,
2746     * Audio, Timed Text), the most recent one will be chosen.
2747     * </p>
2748     * <p>
2749     * The first audio and video tracks are selected by default if available, even though
2750     * this method is not called. However, no timed text track will be selected until
2751     * this function is called.
2752     * </p>
2753     * <p>
2754     * Currently, only timed text tracks or audio tracks can be selected via this method.
2755     * In addition, the support for selecting an audio track at runtime is pretty limited
2756     * in that an audio track can only be selected in the <em>Prepared</em> state.
2757     * </p>
2758     * @param index the index of the track to be selected. The valid range of the index
2759     * is 0..total number of track - 1. The total number of tracks as well as the type of
2760     * each individual track can be found by calling {@link #getTrackInfo()} method.
2761     * @throws IllegalStateException if called in an invalid state.
2762     *
2763     * @see android.media.MediaPlayer#getTrackInfo
2764     */
2765    public void selectTrack(int index) throws IllegalStateException {
2766        selectOrDeselectTrack(index, true /* select */);
2767    }
2768
2769    /**
2770     * Deselect a track.
2771     * <p>
2772     * Currently, the track must be a timed text track and no audio or video tracks can be
2773     * deselected. If the timed text track identified by index has not been
2774     * selected before, it throws an exception.
2775     * </p>
2776     * @param index the index of the track to be deselected. The valid range of the index
2777     * is 0..total number of tracks - 1. The total number of tracks as well as the type of
2778     * each individual track can be found by calling {@link #getTrackInfo()} method.
2779     * @throws IllegalStateException if called in an invalid state.
2780     *
2781     * @see android.media.MediaPlayer#getTrackInfo
2782     */
2783    public void deselectTrack(int index) throws IllegalStateException {
2784        selectOrDeselectTrack(index, false /* select */);
2785    }
2786
2787    private void selectOrDeselectTrack(int index, boolean select)
2788            throws IllegalStateException {
2789        // handle subtitle track through subtitle controller
2790        populateInbandTracks();
2791
2792        Pair<Integer,SubtitleTrack> p = null;
2793        try {
2794            p = mIndexTrackPairs.get(index);
2795        } catch (ArrayIndexOutOfBoundsException e) {
2796            // ignore bad index
2797            return;
2798        }
2799
2800        SubtitleTrack track = p.second;
2801        if (track == null) {
2802            // inband (de)select
2803            selectOrDeselectInbandTrack(p.first, select);
2804            return;
2805        }
2806
2807        if (mSubtitleController == null) {
2808            return;
2809        }
2810
2811        if (!select) {
2812            // out-of-band deselect
2813            if (mSubtitleController.getSelectedTrack() == track) {
2814                mSubtitleController.selectTrack(null);
2815            } else {
2816                Log.w(TAG, "trying to deselect track that was not selected");
2817            }
2818            return;
2819        }
2820
2821        // out-of-band select
2822        if (track.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT) {
2823            int ttIndex = getSelectedTrack(TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
2824            synchronized (mIndexTrackPairs) {
2825                if (ttIndex >= 0 && ttIndex < mIndexTrackPairs.size()) {
2826                    Pair<Integer,SubtitleTrack> p2 = mIndexTrackPairs.get(ttIndex);
2827                    if (p2.first != null && p2.second == null) {
2828                        // deselect inband counterpart
2829                        selectOrDeselectInbandTrack(p2.first, false);
2830                    }
2831                }
2832            }
2833        }
2834        mSubtitleController.selectTrack(track);
2835    }
2836
2837    private void selectOrDeselectInbandTrack(int index, boolean select)
2838            throws IllegalStateException {
2839        Parcel request = Parcel.obtain();
2840        Parcel reply = Parcel.obtain();
2841        try {
2842            request.writeInterfaceToken(IMEDIA_PLAYER);
2843            request.writeInt(select? INVOKE_ID_SELECT_TRACK: INVOKE_ID_DESELECT_TRACK);
2844            request.writeInt(index);
2845            invoke(request, reply);
2846        } finally {
2847            request.recycle();
2848            reply.recycle();
2849        }
2850    }
2851
2852
2853    /**
2854     * @param reply Parcel with audio/video duration info for battery
2855                    tracking usage
2856     * @return The status code.
2857     * {@hide}
2858     */
2859    public native static int native_pullBatteryData(Parcel reply);
2860
2861    /**
2862     * Sets the target UDP re-transmit endpoint for the low level player.
2863     * Generally, the address portion of the endpoint is an IP multicast
2864     * address, although a unicast address would be equally valid.  When a valid
2865     * retransmit endpoint has been set, the media player will not decode and
2866     * render the media presentation locally.  Instead, the player will attempt
2867     * to re-multiplex its media data using the Android@Home RTP profile and
2868     * re-transmit to the target endpoint.  Receiver devices (which may be
2869     * either the same as the transmitting device or different devices) may
2870     * instantiate, prepare, and start a receiver player using a setDataSource
2871     * URL of the form...
2872     *
2873     * aahRX://&lt;multicastIP&gt;:&lt;port&gt;
2874     *
2875     * to receive, decode and render the re-transmitted content.
2876     *
2877     * setRetransmitEndpoint may only be called before setDataSource has been
2878     * called; while the player is in the Idle state.
2879     *
2880     * @param endpoint the address and UDP port of the re-transmission target or
2881     * null if no re-transmission is to be performed.
2882     * @throws IllegalStateException if it is called in an invalid state
2883     * @throws IllegalArgumentException if the retransmit endpoint is supplied,
2884     * but invalid.
2885     *
2886     * {@hide} pending API council
2887     */
2888    public void setRetransmitEndpoint(InetSocketAddress endpoint)
2889            throws IllegalStateException, IllegalArgumentException
2890    {
2891        String addrString = null;
2892        int port = 0;
2893
2894        if (null != endpoint) {
2895            addrString = endpoint.getAddress().getHostAddress();
2896            port = endpoint.getPort();
2897        }
2898
2899        int ret = native_setRetransmitEndpoint(addrString, port);
2900        if (ret != 0) {
2901            throw new IllegalArgumentException("Illegal re-transmit endpoint; native ret " + ret);
2902        }
2903    }
2904
2905    private native final int native_setRetransmitEndpoint(String addrString, int port);
2906
2907    @Override
2908    protected void finalize() {
2909        baseRelease();
2910        native_finalize();
2911    }
2912
2913    /* Do not change these values without updating their counterparts
2914     * in include/media/mediaplayer.h!
2915     */
2916    private static final int MEDIA_NOP = 0; // interface test message
2917    private static final int MEDIA_PREPARED = 1;
2918    private static final int MEDIA_PLAYBACK_COMPLETE = 2;
2919    private static final int MEDIA_BUFFERING_UPDATE = 3;
2920    private static final int MEDIA_SEEK_COMPLETE = 4;
2921    private static final int MEDIA_SET_VIDEO_SIZE = 5;
2922    private static final int MEDIA_STARTED = 6;
2923    private static final int MEDIA_PAUSED = 7;
2924    private static final int MEDIA_STOPPED = 8;
2925    private static final int MEDIA_SKIPPED = 9;
2926    private static final int MEDIA_TIMED_TEXT = 99;
2927    private static final int MEDIA_ERROR = 100;
2928    private static final int MEDIA_INFO = 200;
2929    private static final int MEDIA_SUBTITLE_DATA = 201;
2930    private static final int MEDIA_META_DATA = 202;
2931
2932    private TimeProvider mTimeProvider;
2933
2934    /** @hide */
2935    public MediaTimeProvider getMediaTimeProvider() {
2936        if (mTimeProvider == null) {
2937            mTimeProvider = new TimeProvider(this);
2938        }
2939        return mTimeProvider;
2940    }
2941
2942    private class EventHandler extends Handler
2943    {
2944        private MediaPlayer mMediaPlayer;
2945
2946        public EventHandler(MediaPlayer mp, Looper looper) {
2947            super(looper);
2948            mMediaPlayer = mp;
2949        }
2950
2951        @Override
2952        public void handleMessage(Message msg) {
2953            if (mMediaPlayer.mNativeContext == 0) {
2954                Log.w(TAG, "mediaplayer went away with unhandled events");
2955                return;
2956            }
2957            switch(msg.what) {
2958            case MEDIA_PREPARED:
2959                try {
2960                    scanInternalSubtitleTracks();
2961                } catch (RuntimeException e) {
2962                    // send error message instead of crashing;
2963                    // send error message instead of inlining a call to onError
2964                    // to avoid code duplication.
2965                    Message msg2 = obtainMessage(
2966                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, MEDIA_ERROR_UNSUPPORTED, null);
2967                    sendMessage(msg2);
2968                }
2969                OnPreparedListener onPreparedListener = mOnPreparedListener;
2970                if (onPreparedListener != null)
2971                    onPreparedListener.onPrepared(mMediaPlayer);
2972                return;
2973
2974            case MEDIA_PLAYBACK_COMPLETE:
2975                {
2976                    mOnCompletionInternalListener.onCompletion(mMediaPlayer);
2977                    OnCompletionListener onCompletionListener = mOnCompletionListener;
2978                    if (onCompletionListener != null)
2979                        onCompletionListener.onCompletion(mMediaPlayer);
2980                }
2981                stayAwake(false);
2982                return;
2983
2984            case MEDIA_STOPPED:
2985                {
2986                    TimeProvider timeProvider = mTimeProvider;
2987                    if (timeProvider != null) {
2988                        timeProvider.onStopped();
2989                    }
2990                }
2991                break;
2992
2993            case MEDIA_STARTED:
2994            case MEDIA_PAUSED:
2995                {
2996                    TimeProvider timeProvider = mTimeProvider;
2997                    if (timeProvider != null) {
2998                        timeProvider.onPaused(msg.what == MEDIA_PAUSED);
2999                    }
3000                }
3001                break;
3002
3003            case MEDIA_BUFFERING_UPDATE:
3004                OnBufferingUpdateListener onBufferingUpdateListener = mOnBufferingUpdateListener;
3005                if (onBufferingUpdateListener != null)
3006                    onBufferingUpdateListener.onBufferingUpdate(mMediaPlayer, msg.arg1);
3007                return;
3008
3009            case MEDIA_SEEK_COMPLETE:
3010                OnSeekCompleteListener onSeekCompleteListener = mOnSeekCompleteListener;
3011                if (onSeekCompleteListener != null) {
3012                    onSeekCompleteListener.onSeekComplete(mMediaPlayer);
3013                }
3014                // fall through
3015
3016            case MEDIA_SKIPPED:
3017                {
3018                    TimeProvider timeProvider = mTimeProvider;
3019                    if (timeProvider != null) {
3020                        timeProvider.onSeekComplete(mMediaPlayer);
3021                    }
3022                }
3023                return;
3024
3025            case MEDIA_SET_VIDEO_SIZE:
3026                OnVideoSizeChangedListener onVideoSizeChangedListener = mOnVideoSizeChangedListener;
3027                if (onVideoSizeChangedListener != null) {
3028                    onVideoSizeChangedListener.onVideoSizeChanged(
3029                        mMediaPlayer, msg.arg1, msg.arg2);
3030                }
3031                return;
3032
3033            case MEDIA_ERROR:
3034                Log.e(TAG, "Error (" + msg.arg1 + "," + msg.arg2 + ")");
3035                boolean error_was_handled = false;
3036                OnErrorListener onErrorListener = mOnErrorListener;
3037                if (onErrorListener != null) {
3038                    error_was_handled = onErrorListener.onError(mMediaPlayer, msg.arg1, msg.arg2);
3039                }
3040                {
3041                    mOnCompletionInternalListener.onCompletion(mMediaPlayer);
3042                    OnCompletionListener onCompletionListener = mOnCompletionListener;
3043                    if (onCompletionListener != null && ! error_was_handled) {
3044                        onCompletionListener.onCompletion(mMediaPlayer);
3045                    }
3046                }
3047                stayAwake(false);
3048                return;
3049
3050            case MEDIA_INFO:
3051                switch (msg.arg1) {
3052                case MEDIA_INFO_VIDEO_TRACK_LAGGING:
3053                    Log.i(TAG, "Info (" + msg.arg1 + "," + msg.arg2 + ")");
3054                    break;
3055                case MEDIA_INFO_METADATA_UPDATE:
3056                    try {
3057                        scanInternalSubtitleTracks();
3058                    } catch (RuntimeException e) {
3059                        Message msg2 = obtainMessage(
3060                                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, MEDIA_ERROR_UNSUPPORTED, null);
3061                        sendMessage(msg2);
3062                    }
3063                    // fall through
3064
3065                case MEDIA_INFO_EXTERNAL_METADATA_UPDATE:
3066                    msg.arg1 = MEDIA_INFO_METADATA_UPDATE;
3067                    // update default track selection
3068                    if (mSubtitleController != null) {
3069                        mSubtitleController.selectDefaultTrack();
3070                    }
3071                    break;
3072                case MEDIA_INFO_BUFFERING_START:
3073                case MEDIA_INFO_BUFFERING_END:
3074                    TimeProvider timeProvider = mTimeProvider;
3075                    if (timeProvider != null) {
3076                        timeProvider.onBuffering(msg.arg1 == MEDIA_INFO_BUFFERING_START);
3077                    }
3078                    break;
3079                }
3080
3081                OnInfoListener onInfoListener = mOnInfoListener;
3082                if (onInfoListener != null) {
3083                    onInfoListener.onInfo(mMediaPlayer, msg.arg1, msg.arg2);
3084                }
3085                // No real default action so far.
3086                return;
3087            case MEDIA_TIMED_TEXT:
3088                OnTimedTextListener onTimedTextListener = mOnTimedTextListener;
3089                if (onTimedTextListener == null)
3090                    return;
3091                if (msg.obj == null) {
3092                    onTimedTextListener.onTimedText(mMediaPlayer, null);
3093                } else {
3094                    if (msg.obj instanceof Parcel) {
3095                        Parcel parcel = (Parcel)msg.obj;
3096                        TimedText text = new TimedText(parcel);
3097                        parcel.recycle();
3098                        onTimedTextListener.onTimedText(mMediaPlayer, text);
3099                    }
3100                }
3101                return;
3102
3103            case MEDIA_SUBTITLE_DATA:
3104                OnSubtitleDataListener onSubtitleDataListener = mOnSubtitleDataListener;
3105                if (onSubtitleDataListener == null) {
3106                    return;
3107                }
3108                if (msg.obj instanceof Parcel) {
3109                    Parcel parcel = (Parcel) msg.obj;
3110                    SubtitleData data = new SubtitleData(parcel);
3111                    parcel.recycle();
3112                    onSubtitleDataListener.onSubtitleData(mMediaPlayer, data);
3113                }
3114                return;
3115
3116            case MEDIA_META_DATA:
3117                OnTimedMetaDataAvailableListener onTimedMetaDataAvailableListener =
3118                    mOnTimedMetaDataAvailableListener;
3119                if (onTimedMetaDataAvailableListener == null) {
3120                    return;
3121                }
3122                if (msg.obj instanceof Parcel) {
3123                    Parcel parcel = (Parcel) msg.obj;
3124                    TimedMetaData data = TimedMetaData.createTimedMetaDataFromParcel(parcel);
3125                    parcel.recycle();
3126                    onTimedMetaDataAvailableListener.onTimedMetaDataAvailable(mMediaPlayer, data);
3127                }
3128                return;
3129
3130            case MEDIA_NOP: // interface test message - ignore
3131                break;
3132
3133            default:
3134                Log.e(TAG, "Unknown message type " + msg.what);
3135                return;
3136            }
3137        }
3138    }
3139
3140    /*
3141     * Called from native code when an interesting event happens.  This method
3142     * just uses the EventHandler system to post the event back to the main app thread.
3143     * We use a weak reference to the original MediaPlayer object so that the native
3144     * code is safe from the object disappearing from underneath it.  (This is
3145     * the cookie passed to native_setup().)
3146     */
3147    private static void postEventFromNative(Object mediaplayer_ref,
3148                                            int what, int arg1, int arg2, Object obj)
3149    {
3150        MediaPlayer mp = (MediaPlayer)((WeakReference)mediaplayer_ref).get();
3151        if (mp == null) {
3152            return;
3153        }
3154
3155        if (what == MEDIA_INFO && arg1 == MEDIA_INFO_STARTED_AS_NEXT) {
3156            // this acquires the wakelock if needed, and sets the client side state
3157            mp.start();
3158        }
3159        if (mp.mEventHandler != null) {
3160            Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj);
3161            mp.mEventHandler.sendMessage(m);
3162        }
3163    }
3164
3165    /**
3166     * Interface definition for a callback to be invoked when the media
3167     * source is ready for playback.
3168     */
3169    public interface OnPreparedListener
3170    {
3171        /**
3172         * Called when the media file is ready for playback.
3173         *
3174         * @param mp the MediaPlayer that is ready for playback
3175         */
3176        void onPrepared(MediaPlayer mp);
3177    }
3178
3179    /**
3180     * Register a callback to be invoked when the media source is ready
3181     * for playback.
3182     *
3183     * @param listener the callback that will be run
3184     */
3185    public void setOnPreparedListener(OnPreparedListener listener)
3186    {
3187        mOnPreparedListener = listener;
3188    }
3189
3190    private OnPreparedListener mOnPreparedListener;
3191
3192    /**
3193     * Interface definition for a callback to be invoked when playback of
3194     * a media source has completed.
3195     */
3196    public interface OnCompletionListener
3197    {
3198        /**
3199         * Called when the end of a media source is reached during playback.
3200         *
3201         * @param mp the MediaPlayer that reached the end of the file
3202         */
3203        void onCompletion(MediaPlayer mp);
3204    }
3205
3206    /**
3207     * Register a callback to be invoked when the end of a media source
3208     * has been reached during playback.
3209     *
3210     * @param listener the callback that will be run
3211     */
3212    public void setOnCompletionListener(OnCompletionListener listener)
3213    {
3214        mOnCompletionListener = listener;
3215    }
3216
3217    private OnCompletionListener mOnCompletionListener;
3218
3219    /**
3220     * @hide
3221     * Internal completion listener to update PlayerBase of the play state. Always "registered".
3222     */
3223    private final OnCompletionListener mOnCompletionInternalListener = new OnCompletionListener() {
3224        @Override
3225        public void onCompletion(MediaPlayer mp) {
3226            baseStop();
3227        }
3228    };
3229
3230    /**
3231     * Interface definition of a callback to be invoked indicating buffering
3232     * status of a media resource being streamed over the network.
3233     */
3234    public interface OnBufferingUpdateListener
3235    {
3236        /**
3237         * Called to update status in buffering a media stream received through
3238         * progressive HTTP download. The received buffering percentage
3239         * indicates how much of the content has been buffered or played.
3240         * For example a buffering update of 80 percent when half the content
3241         * has already been played indicates that the next 30 percent of the
3242         * content to play has been buffered.
3243         *
3244         * @param mp      the MediaPlayer the update pertains to
3245         * @param percent the percentage (0-100) of the content
3246         *                that has been buffered or played thus far
3247         */
3248        void onBufferingUpdate(MediaPlayer mp, int percent);
3249    }
3250
3251    /**
3252     * Register a callback to be invoked when the status of a network
3253     * stream's buffer has changed.
3254     *
3255     * @param listener the callback that will be run.
3256     */
3257    public void setOnBufferingUpdateListener(OnBufferingUpdateListener listener)
3258    {
3259        mOnBufferingUpdateListener = listener;
3260    }
3261
3262    private OnBufferingUpdateListener mOnBufferingUpdateListener;
3263
3264    /**
3265     * Interface definition of a callback to be invoked indicating
3266     * the completion of a seek operation.
3267     */
3268    public interface OnSeekCompleteListener
3269    {
3270        /**
3271         * Called to indicate the completion of a seek operation.
3272         *
3273         * @param mp the MediaPlayer that issued the seek operation
3274         */
3275        public void onSeekComplete(MediaPlayer mp);
3276    }
3277
3278    /**
3279     * Register a callback to be invoked when a seek operation has been
3280     * completed.
3281     *
3282     * @param listener the callback that will be run
3283     */
3284    public void setOnSeekCompleteListener(OnSeekCompleteListener listener)
3285    {
3286        mOnSeekCompleteListener = listener;
3287    }
3288
3289    private OnSeekCompleteListener mOnSeekCompleteListener;
3290
3291    /**
3292     * Interface definition of a callback to be invoked when the
3293     * video size is first known or updated
3294     */
3295    public interface OnVideoSizeChangedListener
3296    {
3297        /**
3298         * Called to indicate the video size
3299         *
3300         * The video size (width and height) could be 0 if there was no video,
3301         * no display surface was set, or the value was not determined yet.
3302         *
3303         * @param mp        the MediaPlayer associated with this callback
3304         * @param width     the width of the video
3305         * @param height    the height of the video
3306         */
3307        public void onVideoSizeChanged(MediaPlayer mp, int width, int height);
3308    }
3309
3310    /**
3311     * Register a callback to be invoked when the video size is
3312     * known or updated.
3313     *
3314     * @param listener the callback that will be run
3315     */
3316    public void setOnVideoSizeChangedListener(OnVideoSizeChangedListener listener)
3317    {
3318        mOnVideoSizeChangedListener = listener;
3319    }
3320
3321    private OnVideoSizeChangedListener mOnVideoSizeChangedListener;
3322
3323    /**
3324     * Interface definition of a callback to be invoked when a
3325     * timed text is available for display.
3326     */
3327    public interface OnTimedTextListener
3328    {
3329        /**
3330         * Called to indicate an avaliable timed text
3331         *
3332         * @param mp             the MediaPlayer associated with this callback
3333         * @param text           the timed text sample which contains the text
3334         *                       needed to be displayed and the display format.
3335         */
3336        public void onTimedText(MediaPlayer mp, TimedText text);
3337    }
3338
3339    /**
3340     * Register a callback to be invoked when a timed text is available
3341     * for display.
3342     *
3343     * @param listener the callback that will be run
3344     */
3345    public void setOnTimedTextListener(OnTimedTextListener listener)
3346    {
3347        mOnTimedTextListener = listener;
3348    }
3349
3350    private OnTimedTextListener mOnTimedTextListener;
3351
3352    /**
3353     * Interface definition of a callback to be invoked when a
3354     * track has data available.
3355     *
3356     * @hide
3357     */
3358    public interface OnSubtitleDataListener
3359    {
3360        public void onSubtitleData(MediaPlayer mp, SubtitleData data);
3361    }
3362
3363    /**
3364     * Register a callback to be invoked when a track has data available.
3365     *
3366     * @param listener the callback that will be run
3367     *
3368     * @hide
3369     */
3370    public void setOnSubtitleDataListener(OnSubtitleDataListener listener)
3371    {
3372        mOnSubtitleDataListener = listener;
3373    }
3374
3375    private OnSubtitleDataListener mOnSubtitleDataListener;
3376
3377    /**
3378     * Interface definition of a callback to be invoked when a
3379     * track has timed metadata available.
3380     *
3381     * @see MediaPlayer#setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener)
3382     */
3383    public interface OnTimedMetaDataAvailableListener
3384    {
3385        /**
3386         * Called to indicate avaliable timed metadata
3387         * <p>
3388         * This method will be called as timed metadata is extracted from the media,
3389         * in the same order as it occurs in the media. The timing of this event is
3390         * not controlled by the associated timestamp.
3391         *
3392         * @param mp             the MediaPlayer associated with this callback
3393         * @param data           the timed metadata sample associated with this event
3394         */
3395        public void onTimedMetaDataAvailable(MediaPlayer mp, TimedMetaData data);
3396    }
3397
3398    /**
3399     * Register a callback to be invoked when a selected track has timed metadata available.
3400     * <p>
3401     * Currently only HTTP live streaming data URI's embedded with timed ID3 tags generates
3402     * {@link TimedMetaData}.
3403     *
3404     * @see MediaPlayer#selectTrack(int)
3405     * @see MediaPlayer.OnTimedMetaDataAvailableListener
3406     * @see TimedMetaData
3407     *
3408     * @param listener the callback that will be run
3409     */
3410    public void setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener listener)
3411    {
3412        mOnTimedMetaDataAvailableListener = listener;
3413    }
3414
3415    private OnTimedMetaDataAvailableListener mOnTimedMetaDataAvailableListener;
3416
3417    /* Do not change these values without updating their counterparts
3418     * in include/media/mediaplayer.h!
3419     */
3420    /** Unspecified media player error.
3421     * @see android.media.MediaPlayer.OnErrorListener
3422     */
3423    public static final int MEDIA_ERROR_UNKNOWN = 1;
3424
3425    /** Media server died. In this case, the application must release the
3426     * MediaPlayer object and instantiate a new one.
3427     * @see android.media.MediaPlayer.OnErrorListener
3428     */
3429    public static final int MEDIA_ERROR_SERVER_DIED = 100;
3430
3431    /** The video is streamed and its container is not valid for progressive
3432     * playback i.e the video's index (e.g moov atom) is not at the start of the
3433     * file.
3434     * @see android.media.MediaPlayer.OnErrorListener
3435     */
3436    public static final int MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200;
3437
3438    /** File or network related operation errors. */
3439    public static final int MEDIA_ERROR_IO = -1004;
3440    /** Bitstream is not conforming to the related coding standard or file spec. */
3441    public static final int MEDIA_ERROR_MALFORMED = -1007;
3442    /** Bitstream is conforming to the related coding standard or file spec, but
3443     * the media framework does not support the feature. */
3444    public static final int MEDIA_ERROR_UNSUPPORTED = -1010;
3445    /** Some operation takes too long to complete, usually more than 3-5 seconds. */
3446    public static final int MEDIA_ERROR_TIMED_OUT = -110;
3447
3448    /** Unspecified low-level system error. This value originated from UNKNOWN_ERROR in
3449     * system/core/include/utils/Errors.h
3450     * @see android.media.MediaPlayer.OnErrorListener
3451     * @hide
3452     */
3453    public static final int MEDIA_ERROR_SYSTEM = -2147483648;
3454
3455    /**
3456     * Interface definition of a callback to be invoked when there
3457     * has been an error during an asynchronous operation (other errors
3458     * will throw exceptions at method call time).
3459     */
3460    public interface OnErrorListener
3461    {
3462        /**
3463         * Called to indicate an error.
3464         *
3465         * @param mp      the MediaPlayer the error pertains to
3466         * @param what    the type of error that has occurred:
3467         * <ul>
3468         * <li>{@link #MEDIA_ERROR_UNKNOWN}
3469         * <li>{@link #MEDIA_ERROR_SERVER_DIED}
3470         * </ul>
3471         * @param extra an extra code, specific to the error. Typically
3472         * implementation dependent.
3473         * <ul>
3474         * <li>{@link #MEDIA_ERROR_IO}
3475         * <li>{@link #MEDIA_ERROR_MALFORMED}
3476         * <li>{@link #MEDIA_ERROR_UNSUPPORTED}
3477         * <li>{@link #MEDIA_ERROR_TIMED_OUT}
3478         * <li><code>MEDIA_ERROR_SYSTEM (-2147483648)</code> - low-level system error.
3479         * </ul>
3480         * @return True if the method handled the error, false if it didn't.
3481         * Returning false, or not having an OnErrorListener at all, will
3482         * cause the OnCompletionListener to be called.
3483         */
3484        boolean onError(MediaPlayer mp, int what, int extra);
3485    }
3486
3487    /**
3488     * Register a callback to be invoked when an error has happened
3489     * during an asynchronous operation.
3490     *
3491     * @param listener the callback that will be run
3492     */
3493    public void setOnErrorListener(OnErrorListener listener)
3494    {
3495        mOnErrorListener = listener;
3496    }
3497
3498    private OnErrorListener mOnErrorListener;
3499
3500
3501    /* Do not change these values without updating their counterparts
3502     * in include/media/mediaplayer.h!
3503     */
3504    /** Unspecified media player info.
3505     * @see android.media.MediaPlayer.OnInfoListener
3506     */
3507    public static final int MEDIA_INFO_UNKNOWN = 1;
3508
3509    /** The player was started because it was used as the next player for another
3510     * player, which just completed playback.
3511     * @see android.media.MediaPlayer.OnInfoListener
3512     * @hide
3513     */
3514    public static final int MEDIA_INFO_STARTED_AS_NEXT = 2;
3515
3516    /** The player just pushed the very first video frame for rendering.
3517     * @see android.media.MediaPlayer.OnInfoListener
3518     */
3519    public static final int MEDIA_INFO_VIDEO_RENDERING_START = 3;
3520
3521    /** The video is too complex for the decoder: it can't decode frames fast
3522     *  enough. Possibly only the audio plays fine at this stage.
3523     * @see android.media.MediaPlayer.OnInfoListener
3524     */
3525    public static final int MEDIA_INFO_VIDEO_TRACK_LAGGING = 700;
3526
3527    /** MediaPlayer is temporarily pausing playback internally in order to
3528     * buffer more data.
3529     * @see android.media.MediaPlayer.OnInfoListener
3530     */
3531    public static final int MEDIA_INFO_BUFFERING_START = 701;
3532
3533    /** MediaPlayer is resuming playback after filling buffers.
3534     * @see android.media.MediaPlayer.OnInfoListener
3535     */
3536    public static final int MEDIA_INFO_BUFFERING_END = 702;
3537
3538    /** Estimated network bandwidth information (kbps) is available; currently this event fires
3539     * simultaneously as {@link #MEDIA_INFO_BUFFERING_START} and {@link #MEDIA_INFO_BUFFERING_END}
3540     * when playing network files.
3541     * @see android.media.MediaPlayer.OnInfoListener
3542     * @hide
3543     */
3544    public static final int MEDIA_INFO_NETWORK_BANDWIDTH = 703;
3545
3546    /** Bad interleaving means that a media has been improperly interleaved or
3547     * not interleaved at all, e.g has all the video samples first then all the
3548     * audio ones. Video is playing but a lot of disk seeks may be happening.
3549     * @see android.media.MediaPlayer.OnInfoListener
3550     */
3551    public static final int MEDIA_INFO_BAD_INTERLEAVING = 800;
3552
3553    /** The media cannot be seeked (e.g live stream)
3554     * @see android.media.MediaPlayer.OnInfoListener
3555     */
3556    public static final int MEDIA_INFO_NOT_SEEKABLE = 801;
3557
3558    /** A new set of metadata is available.
3559     * @see android.media.MediaPlayer.OnInfoListener
3560     */
3561    public static final int MEDIA_INFO_METADATA_UPDATE = 802;
3562
3563    /** A new set of external-only metadata is available.  Used by
3564     *  JAVA framework to avoid triggering track scanning.
3565     * @hide
3566     */
3567    public static final int MEDIA_INFO_EXTERNAL_METADATA_UPDATE = 803;
3568
3569    /** Failed to handle timed text track properly.
3570     * @see android.media.MediaPlayer.OnInfoListener
3571     *
3572     * {@hide}
3573     */
3574    public static final int MEDIA_INFO_TIMED_TEXT_ERROR = 900;
3575
3576    /** Subtitle track was not supported by the media framework.
3577     * @see android.media.MediaPlayer.OnInfoListener
3578     */
3579    public static final int MEDIA_INFO_UNSUPPORTED_SUBTITLE = 901;
3580
3581    /** Reading the subtitle track takes too long.
3582     * @see android.media.MediaPlayer.OnInfoListener
3583     */
3584    public static final int MEDIA_INFO_SUBTITLE_TIMED_OUT = 902;
3585
3586    /**
3587     * Interface definition of a callback to be invoked to communicate some
3588     * info and/or warning about the media or its playback.
3589     */
3590    public interface OnInfoListener
3591    {
3592        /**
3593         * Called to indicate an info or a warning.
3594         *
3595         * @param mp      the MediaPlayer the info pertains to.
3596         * @param what    the type of info or warning.
3597         * <ul>
3598         * <li>{@link #MEDIA_INFO_UNKNOWN}
3599         * <li>{@link #MEDIA_INFO_VIDEO_TRACK_LAGGING}
3600         * <li>{@link #MEDIA_INFO_VIDEO_RENDERING_START}
3601         * <li>{@link #MEDIA_INFO_BUFFERING_START}
3602         * <li>{@link #MEDIA_INFO_BUFFERING_END}
3603         * <li><code>MEDIA_INFO_NETWORK_BANDWIDTH (703)</code> -
3604         *     bandwidth information is available (as <code>extra</code> kbps)
3605         * <li>{@link #MEDIA_INFO_BAD_INTERLEAVING}
3606         * <li>{@link #MEDIA_INFO_NOT_SEEKABLE}
3607         * <li>{@link #MEDIA_INFO_METADATA_UPDATE}
3608         * <li>{@link #MEDIA_INFO_UNSUPPORTED_SUBTITLE}
3609         * <li>{@link #MEDIA_INFO_SUBTITLE_TIMED_OUT}
3610         * </ul>
3611         * @param extra an extra code, specific to the info. Typically
3612         * implementation dependent.
3613         * @return True if the method handled the info, false if it didn't.
3614         * Returning false, or not having an OnInfoListener at all, will
3615         * cause the info to be discarded.
3616         */
3617        boolean onInfo(MediaPlayer mp, int what, int extra);
3618    }
3619
3620    /**
3621     * Register a callback to be invoked when an info/warning is available.
3622     *
3623     * @param listener the callback that will be run
3624     */
3625    public void setOnInfoListener(OnInfoListener listener)
3626    {
3627        mOnInfoListener = listener;
3628    }
3629
3630    private OnInfoListener mOnInfoListener;
3631
3632    /*
3633     * Test whether a given video scaling mode is supported.
3634     */
3635    private boolean isVideoScalingModeSupported(int mode) {
3636        return (mode == VIDEO_SCALING_MODE_SCALE_TO_FIT ||
3637                mode == VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING);
3638    }
3639
3640    /** @hide */
3641    static class TimeProvider implements MediaPlayer.OnSeekCompleteListener,
3642            MediaTimeProvider {
3643        private static final String TAG = "MTP";
3644        private static final long MAX_NS_WITHOUT_POSITION_CHECK = 5000000000L;
3645        private static final long MAX_EARLY_CALLBACK_US = 1000;
3646        private static final long TIME_ADJUSTMENT_RATE = 2;  /* meaning 1/2 */
3647        private long mLastTimeUs = 0;
3648        private MediaPlayer mPlayer;
3649        private boolean mPaused = true;
3650        private boolean mStopped = true;
3651        private boolean mBuffering;
3652        private long mLastReportedTime;
3653        private long mTimeAdjustment;
3654        // since we are expecting only a handful listeners per stream, there is
3655        // no need for log(N) search performance
3656        private MediaTimeProvider.OnMediaTimeListener mListeners[];
3657        private long mTimes[];
3658        private long mLastNanoTime;
3659        private Handler mEventHandler;
3660        private boolean mRefresh = false;
3661        private boolean mPausing = false;
3662        private boolean mSeeking = false;
3663        private static final int NOTIFY = 1;
3664        private static final int NOTIFY_TIME = 0;
3665        private static final int REFRESH_AND_NOTIFY_TIME = 1;
3666        private static final int NOTIFY_STOP = 2;
3667        private static final int NOTIFY_SEEK = 3;
3668        private static final int NOTIFY_TRACK_DATA = 4;
3669        private HandlerThread mHandlerThread;
3670
3671        /** @hide */
3672        public boolean DEBUG = false;
3673
3674        public TimeProvider(MediaPlayer mp) {
3675            mPlayer = mp;
3676            try {
3677                getCurrentTimeUs(true, false);
3678            } catch (IllegalStateException e) {
3679                // we assume starting position
3680                mRefresh = true;
3681            }
3682
3683            Looper looper;
3684            if ((looper = Looper.myLooper()) == null &&
3685                (looper = Looper.getMainLooper()) == null) {
3686                // Create our own looper here in case MP was created without one
3687                mHandlerThread = new HandlerThread("MediaPlayerMTPEventThread",
3688                      Process.THREAD_PRIORITY_FOREGROUND);
3689                mHandlerThread.start();
3690                looper = mHandlerThread.getLooper();
3691            }
3692            mEventHandler = new EventHandler(looper);
3693
3694            mListeners = new MediaTimeProvider.OnMediaTimeListener[0];
3695            mTimes = new long[0];
3696            mLastTimeUs = 0;
3697            mTimeAdjustment = 0;
3698        }
3699
3700        private void scheduleNotification(int type, long delayUs) {
3701            // ignore time notifications until seek is handled
3702            if (mSeeking &&
3703                    (type == NOTIFY_TIME || type == REFRESH_AND_NOTIFY_TIME)) {
3704                return;
3705            }
3706
3707            if (DEBUG) Log.v(TAG, "scheduleNotification " + type + " in " + delayUs);
3708            mEventHandler.removeMessages(NOTIFY);
3709            Message msg = mEventHandler.obtainMessage(NOTIFY, type, 0);
3710            mEventHandler.sendMessageDelayed(msg, (int) (delayUs / 1000));
3711        }
3712
3713        /** @hide */
3714        public void close() {
3715            mEventHandler.removeMessages(NOTIFY);
3716            if (mHandlerThread != null) {
3717                mHandlerThread.quitSafely();
3718                mHandlerThread = null;
3719            }
3720        }
3721
3722        /** @hide */
3723        protected void finalize() {
3724            if (mHandlerThread != null) {
3725                mHandlerThread.quitSafely();
3726            }
3727        }
3728
3729        /** @hide */
3730        public void onPaused(boolean paused) {
3731            synchronized(this) {
3732                if (DEBUG) Log.d(TAG, "onPaused: " + paused);
3733                if (mStopped) { // handle as seek if we were stopped
3734                    mStopped = false;
3735                    mSeeking = true;
3736                    scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
3737                } else {
3738                    mPausing = paused;  // special handling if player disappeared
3739                    mSeeking = false;
3740                    scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */);
3741                }
3742            }
3743        }
3744
3745        /** @hide */
3746        public void onBuffering(boolean buffering) {
3747            synchronized (this) {
3748                if (DEBUG) Log.d(TAG, "onBuffering: " + buffering);
3749                mBuffering = buffering;
3750                scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */);
3751            }
3752        }
3753
3754        /** @hide */
3755        public void onStopped() {
3756            synchronized(this) {
3757                if (DEBUG) Log.d(TAG, "onStopped");
3758                mPaused = true;
3759                mStopped = true;
3760                mSeeking = false;
3761                mBuffering = false;
3762                scheduleNotification(NOTIFY_STOP, 0 /* delay */);
3763            }
3764        }
3765
3766        /** @hide */
3767        @Override
3768        public void onSeekComplete(MediaPlayer mp) {
3769            synchronized(this) {
3770                mStopped = false;
3771                mSeeking = true;
3772                scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
3773            }
3774        }
3775
3776        /** @hide */
3777        public void onNewPlayer() {
3778            if (mRefresh) {
3779                synchronized(this) {
3780                    mStopped = false;
3781                    mSeeking = true;
3782                    mBuffering = false;
3783                    scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
3784                }
3785            }
3786        }
3787
3788        private synchronized void notifySeek() {
3789            mSeeking = false;
3790            try {
3791                long timeUs = getCurrentTimeUs(true, false);
3792                if (DEBUG) Log.d(TAG, "onSeekComplete at " + timeUs);
3793
3794                for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) {
3795                    if (listener == null) {
3796                        break;
3797                    }
3798                    listener.onSeek(timeUs);
3799                }
3800            } catch (IllegalStateException e) {
3801                // we should not be there, but at least signal pause
3802                if (DEBUG) Log.d(TAG, "onSeekComplete but no player");
3803                mPausing = true;  // special handling if player disappeared
3804                notifyTimedEvent(false /* refreshTime */);
3805            }
3806        }
3807
3808        private synchronized void notifyTrackData(Pair<SubtitleTrack, byte[]> trackData) {
3809            SubtitleTrack track = trackData.first;
3810            byte[] data = trackData.second;
3811            track.onData(data, true /* eos */, ~0 /* runID: keep forever */);
3812        }
3813
3814        private synchronized void notifyStop() {
3815            for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) {
3816                if (listener == null) {
3817                    break;
3818                }
3819                listener.onStop();
3820            }
3821        }
3822
3823        private int registerListener(MediaTimeProvider.OnMediaTimeListener listener) {
3824            int i = 0;
3825            for (; i < mListeners.length; i++) {
3826                if (mListeners[i] == listener || mListeners[i] == null) {
3827                    break;
3828                }
3829            }
3830
3831            // new listener
3832            if (i >= mListeners.length) {
3833                MediaTimeProvider.OnMediaTimeListener[] newListeners =
3834                    new MediaTimeProvider.OnMediaTimeListener[i + 1];
3835                long[] newTimes = new long[i + 1];
3836                System.arraycopy(mListeners, 0, newListeners, 0, mListeners.length);
3837                System.arraycopy(mTimes, 0, newTimes, 0, mTimes.length);
3838                mListeners = newListeners;
3839                mTimes = newTimes;
3840            }
3841
3842            if (mListeners[i] == null) {
3843                mListeners[i] = listener;
3844                mTimes[i] = MediaTimeProvider.NO_TIME;
3845            }
3846            return i;
3847        }
3848
3849        public void notifyAt(
3850                long timeUs, MediaTimeProvider.OnMediaTimeListener listener) {
3851            synchronized(this) {
3852                if (DEBUG) Log.d(TAG, "notifyAt " + timeUs);
3853                mTimes[registerListener(listener)] = timeUs;
3854                scheduleNotification(NOTIFY_TIME, 0 /* delay */);
3855            }
3856        }
3857
3858        public void scheduleUpdate(MediaTimeProvider.OnMediaTimeListener listener) {
3859            synchronized(this) {
3860                if (DEBUG) Log.d(TAG, "scheduleUpdate");
3861                int i = registerListener(listener);
3862
3863                if (!mStopped) {
3864                    mTimes[i] = 0;
3865                    scheduleNotification(NOTIFY_TIME, 0 /* delay */);
3866                }
3867            }
3868        }
3869
3870        public void cancelNotifications(
3871                MediaTimeProvider.OnMediaTimeListener listener) {
3872            synchronized(this) {
3873                int i = 0;
3874                for (; i < mListeners.length; i++) {
3875                    if (mListeners[i] == listener) {
3876                        System.arraycopy(mListeners, i + 1,
3877                                mListeners, i, mListeners.length - i - 1);
3878                        System.arraycopy(mTimes, i + 1,
3879                                mTimes, i, mTimes.length - i - 1);
3880                        mListeners[mListeners.length - 1] = null;
3881                        mTimes[mTimes.length - 1] = NO_TIME;
3882                        break;
3883                    } else if (mListeners[i] == null) {
3884                        break;
3885                    }
3886                }
3887
3888                scheduleNotification(NOTIFY_TIME, 0 /* delay */);
3889            }
3890        }
3891
3892        private synchronized void notifyTimedEvent(boolean refreshTime) {
3893            // figure out next callback
3894            long nowUs;
3895            try {
3896                nowUs = getCurrentTimeUs(refreshTime, true);
3897            } catch (IllegalStateException e) {
3898                // assume we paused until new player arrives
3899                mRefresh = true;
3900                mPausing = true; // this ensures that call succeeds
3901                nowUs = getCurrentTimeUs(refreshTime, true);
3902            }
3903            long nextTimeUs = nowUs;
3904
3905            if (mSeeking) {
3906                // skip timed-event notifications until seek is complete
3907                return;
3908            }
3909
3910            if (DEBUG) {
3911                StringBuilder sb = new StringBuilder();
3912                sb.append("notifyTimedEvent(").append(mLastTimeUs).append(" -> ")
3913                        .append(nowUs).append(") from {");
3914                boolean first = true;
3915                for (long time: mTimes) {
3916                    if (time == NO_TIME) {
3917                        continue;
3918                    }
3919                    if (!first) sb.append(", ");
3920                    sb.append(time);
3921                    first = false;
3922                }
3923                sb.append("}");
3924                Log.d(TAG, sb.toString());
3925            }
3926
3927            Vector<MediaTimeProvider.OnMediaTimeListener> activatedListeners =
3928                new Vector<MediaTimeProvider.OnMediaTimeListener>();
3929            for (int ix = 0; ix < mTimes.length; ix++) {
3930                if (mListeners[ix] == null) {
3931                    break;
3932                }
3933                if (mTimes[ix] <= NO_TIME) {
3934                    // ignore, unless we were stopped
3935                } else if (mTimes[ix] <= nowUs + MAX_EARLY_CALLBACK_US) {
3936                    activatedListeners.add(mListeners[ix]);
3937                    if (DEBUG) Log.d(TAG, "removed");
3938                    mTimes[ix] = NO_TIME;
3939                } else if (nextTimeUs == nowUs || mTimes[ix] < nextTimeUs) {
3940                    nextTimeUs = mTimes[ix];
3941                }
3942            }
3943
3944            if (nextTimeUs > nowUs && !mPaused) {
3945                // schedule callback at nextTimeUs
3946                if (DEBUG) Log.d(TAG, "scheduling for " + nextTimeUs + " and " + nowUs);
3947                scheduleNotification(NOTIFY_TIME, nextTimeUs - nowUs);
3948            } else {
3949                mEventHandler.removeMessages(NOTIFY);
3950                // no more callbacks
3951            }
3952
3953            for (MediaTimeProvider.OnMediaTimeListener listener: activatedListeners) {
3954                listener.onTimedEvent(nowUs);
3955            }
3956        }
3957
3958        private long getEstimatedTime(long nanoTime, boolean monotonic) {
3959            if (mPaused) {
3960                mLastReportedTime = mLastTimeUs + mTimeAdjustment;
3961            } else {
3962                long timeSinceRead = (nanoTime - mLastNanoTime) / 1000;
3963                mLastReportedTime = mLastTimeUs + timeSinceRead;
3964                if (mTimeAdjustment > 0) {
3965                    long adjustment =
3966                        mTimeAdjustment - timeSinceRead / TIME_ADJUSTMENT_RATE;
3967                    if (adjustment <= 0) {
3968                        mTimeAdjustment = 0;
3969                    } else {
3970                        mLastReportedTime += adjustment;
3971                    }
3972                }
3973            }
3974            return mLastReportedTime;
3975        }
3976
3977        public long getCurrentTimeUs(boolean refreshTime, boolean monotonic)
3978                throws IllegalStateException {
3979            synchronized (this) {
3980                // we always refresh the time when the paused-state changes, because
3981                // we expect to have received the pause-change event delayed.
3982                if (mPaused && !refreshTime) {
3983                    return mLastReportedTime;
3984                }
3985
3986                long nanoTime = System.nanoTime();
3987                if (refreshTime ||
3988                        nanoTime >= mLastNanoTime + MAX_NS_WITHOUT_POSITION_CHECK) {
3989                    try {
3990                        mLastTimeUs = mPlayer.getCurrentPosition() * 1000L;
3991                        mPaused = !mPlayer.isPlaying() || mBuffering;
3992                        if (DEBUG) Log.v(TAG, (mPaused ? "paused" : "playing") + " at " + mLastTimeUs);
3993                    } catch (IllegalStateException e) {
3994                        if (mPausing) {
3995                            // if we were pausing, get last estimated timestamp
3996                            mPausing = false;
3997                            getEstimatedTime(nanoTime, monotonic);
3998                            mPaused = true;
3999                            if (DEBUG) Log.d(TAG, "illegal state, but pausing: estimating at " + mLastReportedTime);
4000                            return mLastReportedTime;
4001                        }
4002                        // TODO get time when prepared
4003                        throw e;
4004                    }
4005                    mLastNanoTime = nanoTime;
4006                    if (monotonic && mLastTimeUs < mLastReportedTime) {
4007                        /* have to adjust time */
4008                        mTimeAdjustment = mLastReportedTime - mLastTimeUs;
4009                        if (mTimeAdjustment > 1000000) {
4010                            // schedule seeked event if time jumped significantly
4011                            // TODO: do this properly by introducing an exception
4012                            mStopped = false;
4013                            mSeeking = true;
4014                            scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
4015                        }
4016                    } else {
4017                        mTimeAdjustment = 0;
4018                    }
4019                }
4020
4021                return getEstimatedTime(nanoTime, monotonic);
4022            }
4023        }
4024
4025        private class EventHandler extends Handler {
4026            public EventHandler(Looper looper) {
4027                super(looper);
4028            }
4029
4030            @Override
4031            public void handleMessage(Message msg) {
4032                if (msg.what == NOTIFY) {
4033                    switch (msg.arg1) {
4034                    case NOTIFY_TIME:
4035                        notifyTimedEvent(false /* refreshTime */);
4036                        break;
4037                    case REFRESH_AND_NOTIFY_TIME:
4038                        notifyTimedEvent(true /* refreshTime */);
4039                        break;
4040                    case NOTIFY_STOP:
4041                        notifyStop();
4042                        break;
4043                    case NOTIFY_SEEK:
4044                        notifySeek();
4045                        break;
4046                    case NOTIFY_TRACK_DATA:
4047                        notifyTrackData((Pair<SubtitleTrack, byte[]>)msg.obj);
4048                        break;
4049                    }
4050                }
4051            }
4052        }
4053    }
4054}
4055