MediaPlayer.java revision a460ea1a2c178a2c1816b1a983333166726ad579
1/* 2 * Copyright (C) 2006 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.media; 18 19import android.annotation.IntDef; 20import android.annotation.NonNull; 21import android.annotation.Nullable; 22import android.app.ActivityThread; 23import android.app.AppOpsManager; 24import android.content.ContentResolver; 25import android.content.Context; 26import android.content.res.AssetFileDescriptor; 27import android.net.Uri; 28import android.os.Handler; 29import android.os.HandlerThread; 30import android.os.IBinder; 31import android.os.Looper; 32import android.os.Message; 33import android.os.Parcel; 34import android.os.Parcelable; 35import android.os.Process; 36import android.os.PowerManager; 37import android.os.RemoteException; 38import android.os.ServiceManager; 39import android.provider.Settings; 40import android.system.ErrnoException; 41import android.system.OsConstants; 42import android.util.Log; 43import android.util.Pair; 44import android.view.Surface; 45import android.view.SurfaceHolder; 46import android.widget.VideoView; 47import android.graphics.SurfaceTexture; 48import android.media.AudioManager; 49import android.media.MediaFormat; 50import android.media.MediaTimeProvider; 51import android.media.PlaybackParams; 52import android.media.SubtitleController; 53import android.media.SubtitleController.Anchor; 54import android.media.SubtitleData; 55import android.media.SubtitleTrack.RenderingWidget; 
56import android.media.SyncParams; 57 58import com.android.internal.app.IAppOpsService; 59 60import libcore.io.IoBridge; 61import libcore.io.Libcore; 62 63import java.io.ByteArrayOutputStream; 64import java.io.File; 65import java.io.FileDescriptor; 66import java.io.FileInputStream; 67import java.io.FileNotFoundException; 68import java.io.IOException; 69import java.io.InputStream; 70import java.lang.Runnable; 71import java.lang.annotation.Retention; 72import java.lang.annotation.RetentionPolicy; 73import java.net.InetSocketAddress; 74import java.util.BitSet; 75import java.util.HashSet; 76import java.util.Map; 77import java.util.Scanner; 78import java.util.Set; 79import java.util.Vector; 80import java.lang.ref.WeakReference; 81 82/** 83 * MediaPlayer class can be used to control playback 84 * of audio/video files and streams. An example on how to use the methods in 85 * this class can be found in {@link android.widget.VideoView}. 86 * 87 * <p>Topics covered here are: 88 * <ol> 89 * <li><a href="#StateDiagram">State Diagram</a> 90 * <li><a href="#Valid_and_Invalid_States">Valid and Invalid States</a> 91 * <li><a href="#Permissions">Permissions</a> 92 * <li><a href="#Callbacks">Register informational and error callbacks</a> 93 * </ol> 94 * 95 * <div class="special reference"> 96 * <h3>Developer Guides</h3> 97 * <p>For more information about how to use MediaPlayer, read the 98 * <a href="{@docRoot}guide/topics/media/mediaplayer.html">Media Playback</a> developer guide.</p> 99 * </div> 100 * 101 * <a name="StateDiagram"></a> 102 * <h3>State Diagram</h3> 103 * 104 * <p>Playback control of audio/video files and streams is managed as a state 105 * machine. The following diagram shows the life cycle and the states of a 106 * MediaPlayer object driven by the supported playback control operations. 107 * The ovals represent the states a MediaPlayer object may reside 108 * in. The arcs represent the playback control operations that drive the object 109 * state transition. 
There are two types of arcs. The arcs with a single arrow 110 * head represent synchronous method calls, while those with 111 * a double arrow head represent asynchronous method calls.</p> 112 * 113 * <p><img src="../../../images/mediaplayer_state_diagram.gif" 114 * alt="MediaPlayer State diagram" 115 * border="0" /></p> 116 * 117 * <p>From this state diagram, one can see that a MediaPlayer object has the 118 * following states:</p> 119 * <ul> 120 * <li>When a MediaPlayer object is just created using <code>new</code> or 121 * after {@link #reset()} is called, it is in the <em>Idle</em> state; and after 122 * {@link #release()} is called, it is in the <em>End</em> state. Between these 123 * two states is the life cycle of the MediaPlayer object. 124 * <ul> 125 * <li>There is a subtle but important difference between a newly constructed 126 * MediaPlayer object and the MediaPlayer object after {@link #reset()} 127 * is called. It is a programming error to invoke methods such 128 * as {@link #getCurrentPosition()}, 129 * {@link #getDuration()}, {@link #getVideoHeight()}, 130 * {@link #getVideoWidth()}, {@link #setAudioStreamType(int)}, 131 * {@link #setLooping(boolean)}, 132 * {@link #setVolume(float, float)}, {@link #pause()}, {@link #start()}, 133 * {@link #stop()}, {@link #seekTo(int)}, {@link #prepare()} or 134 * {@link #prepareAsync()} in the <em>Idle</em> state for both cases. If any of these 135 * methods is called right after a MediaPlayer object is constructed, 136 * the user supplied callback method OnErrorListener.onError() won't be 137 * called by the internal player engine and the object state remains 138 * unchanged; but if these methods are called right after {@link #reset()}, 139 * the user supplied callback method OnErrorListener.onError() will be 140 * invoked by the internal player engine and the object will be 141 * transfered to the <em>Error</em> state. 
</li> 142 * <li>It is also recommended that once 143 * a MediaPlayer object is no longer being used, call {@link #release()} immediately 144 * so that resources used by the internal player engine associated with the 145 * MediaPlayer object can be released immediately. Resource may include 146 * singleton resources such as hardware acceleration components and 147 * failure to call {@link #release()} may cause subsequent instances of 148 * MediaPlayer objects to fallback to software implementations or fail 149 * altogether. Once the MediaPlayer 150 * object is in the <em>End</em> state, it can no longer be used and 151 * there is no way to bring it back to any other state. </li> 152 * <li>Furthermore, 153 * the MediaPlayer objects created using <code>new</code> is in the 154 * <em>Idle</em> state, while those created with one 155 * of the overloaded convenient <code>create</code> methods are <em>NOT</em> 156 * in the <em>Idle</em> state. In fact, the objects are in the <em>Prepared</em> 157 * state if the creation using <code>create</code> method is successful. 158 * </li> 159 * </ul> 160 * </li> 161 * <li>In general, some playback control operation may fail due to various 162 * reasons, such as unsupported audio/video format, poorly interleaved 163 * audio/video, resolution too high, streaming timeout, and the like. 164 * Thus, error reporting and recovery is an important concern under 165 * these circumstances. Sometimes, due to programming errors, invoking a playback 166 * control operation in an invalid state may also occur. Under all these 167 * error conditions, the internal player engine invokes a user supplied 168 * OnErrorListener.onError() method if an OnErrorListener has been 169 * registered beforehand via 170 * {@link #setOnErrorListener(android.media.MediaPlayer.OnErrorListener)}. 
171 * <ul> 172 * <li>It is important to note that once an error occurs, the 173 * MediaPlayer object enters the <em>Error</em> state (except as noted 174 * above), even if an error listener has not been registered by the application.</li> 175 * <li>In order to reuse a MediaPlayer object that is in the <em> 176 * Error</em> state and recover from the error, 177 * {@link #reset()} can be called to restore the object to its <em>Idle</em> 178 * state.</li> 179 * <li>It is good programming practice to have your application 180 * register a OnErrorListener to look out for error notifications from 181 * the internal player engine.</li> 182 * <li>IllegalStateException is 183 * thrown to prevent programming errors such as calling {@link #prepare()}, 184 * {@link #prepareAsync()}, or one of the overloaded <code>setDataSource 185 * </code> methods in an invalid state. </li> 186 * </ul> 187 * </li> 188 * <li>Calling 189 * {@link #setDataSource(FileDescriptor)}, or 190 * {@link #setDataSource(String)}, or 191 * {@link #setDataSource(Context, Uri)}, or 192 * {@link #setDataSource(FileDescriptor, long, long)}, or 193 * {@link #setDataSource(MediaDataSource)} transfers a 194 * MediaPlayer object in the <em>Idle</em> state to the 195 * <em>Initialized</em> state. 196 * <ul> 197 * <li>An IllegalStateException is thrown if 198 * setDataSource() is called in any other state.</li> 199 * <li>It is good programming 200 * practice to always look out for <code>IllegalArgumentException</code> 201 * and <code>IOException</code> that may be thrown from the overloaded 202 * <code>setDataSource</code> methods.</li> 203 * </ul> 204 * </li> 205 * <li>A MediaPlayer object must first enter the <em>Prepared</em> state 206 * before playback can be started. 207 * <ul> 208 * <li>There are two ways (synchronous vs. 
209 * asynchronous) that the <em>Prepared</em> state can be reached: 210 * either a call to {@link #prepare()} (synchronous) which 211 * transfers the object to the <em>Prepared</em> state once the method call 212 * returns, or a call to {@link #prepareAsync()} (asynchronous) which 213 * first transfers the object to the <em>Preparing</em> state after the 214 * call returns (which occurs almost right way) while the internal 215 * player engine continues working on the rest of preparation work 216 * until the preparation work completes. When the preparation completes or when {@link #prepare()} call returns, 217 * the internal player engine then calls a user supplied callback method, 218 * onPrepared() of the OnPreparedListener interface, if an 219 * OnPreparedListener is registered beforehand via {@link 220 * #setOnPreparedListener(android.media.MediaPlayer.OnPreparedListener)}.</li> 221 * <li>It is important to note that 222 * the <em>Preparing</em> state is a transient state, and the behavior 223 * of calling any method with side effect while a MediaPlayer object is 224 * in the <em>Preparing</em> state is undefined.</li> 225 * <li>An IllegalStateException is 226 * thrown if {@link #prepare()} or {@link #prepareAsync()} is called in 227 * any other state.</li> 228 * <li>While in the <em>Prepared</em> state, properties 229 * such as audio/sound volume, screenOnWhilePlaying, looping can be 230 * adjusted by invoking the corresponding set methods.</li> 231 * </ul> 232 * </li> 233 * <li>To start the playback, {@link #start()} must be called. After 234 * {@link #start()} returns successfully, the MediaPlayer object is in the 235 * <em>Started</em> state. {@link #isPlaying()} can be called to test 236 * whether the MediaPlayer object is in the <em>Started</em> state. 
237 * <ul> 238 * <li>While in the <em>Started</em> state, the internal player engine calls 239 * a user supplied OnBufferingUpdateListener.onBufferingUpdate() callback 240 * method if a OnBufferingUpdateListener has been registered beforehand 241 * via {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}. 242 * This callback allows applications to keep track of the buffering status 243 * while streaming audio/video.</li> 244 * <li>Calling {@link #start()} has no effect 245 * on a MediaPlayer object that is already in the <em>Started</em> state.</li> 246 * </ul> 247 * </li> 248 * <li>Playback can be paused and stopped, and the current playback position 249 * can be adjusted. Playback can be paused via {@link #pause()}. When the call to 250 * {@link #pause()} returns, the MediaPlayer object enters the 251 * <em>Paused</em> state. Note that the transition from the <em>Started</em> 252 * state to the <em>Paused</em> state and vice versa happens 253 * asynchronously in the player engine. It may take some time before 254 * the state is updated in calls to {@link #isPlaying()}, and it can be 255 * a number of seconds in the case of streamed content. 256 * <ul> 257 * <li>Calling {@link #start()} to resume playback for a paused 258 * MediaPlayer object, and the resumed playback 259 * position is the same as where it was paused. When the call to 260 * {@link #start()} returns, the paused MediaPlayer object goes back to 261 * the <em>Started</em> state.</li> 262 * <li>Calling {@link #pause()} has no effect on 263 * a MediaPlayer object that is already in the <em>Paused</em> state.</li> 264 * </ul> 265 * </li> 266 * <li>Calling {@link #stop()} stops playback and causes a 267 * MediaPlayer in the <em>Started</em>, <em>Paused</em>, <em>Prepared 268 * </em> or <em>PlaybackCompleted</em> state to enter the 269 * <em>Stopped</em> state.
270 * <ul> 271 * <li>Once in the <em>Stopped</em> state, playback cannot be started 272 * until {@link #prepare()} or {@link #prepareAsync()} are called to set 273 * the MediaPlayer object to the <em>Prepared</em> state again.</li> 274 * <li>Calling {@link #stop()} has no effect on a MediaPlayer 275 * object that is already in the <em>Stopped</em> state.</li> 276 * </ul> 277 * </li> 278 * <li>The playback position can be adjusted with a call to 279 * {@link #seekTo(int)}. 280 * <ul> 281 * <li>Although the asynchronous {@link #seekTo(int)} 282 * call returns right away, the actual seek operation may take a while to 283 * finish, especially for audio/video being streamed. When the actual 284 * seek operation completes, the internal player engine calls a user 285 * supplied OnSeekComplete.onSeekComplete() if an OnSeekCompleteListener 286 * has been registered beforehand via 287 * {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}.</li> 288 * <li>Please 289 * note that {@link #seekTo(int)} can also be called in the other states, 290 * such as <em>Prepared</em>, <em>Paused</em> and <em>PlaybackCompleted 291 * </em> state.</li> 292 * <li>Furthermore, the actual current playback position 293 * can be retrieved with a call to {@link #getCurrentPosition()}, which 294 * is helpful for applications such as a Music player that need to keep 295 * track of the playback progress.</li> 296 * </ul> 297 * </li> 298 * <li>When the playback reaches the end of stream, the playback completes. 299 * <ul> 300 * <li>If the looping mode was being set to <var>true</var> with 301 * {@link #setLooping(boolean)}, the MediaPlayer object shall remain in 302 * the <em>Started</em> state.</li> 303 * <li>If the looping mode was set to <var>false 304 * </var>, the player engine calls a user supplied callback method, 305 * OnCompletion.onCompletion(), if a OnCompletionListener is registered 306 * beforehand via {@link #setOnCompletionListener(OnCompletionListener)}.
307 * The invocation of the callback signals that the object is now in the <em> 308 * PlaybackCompleted</em> state.</li> 309 * <li>While in the <em>PlaybackCompleted</em> 310 * state, calling {@link #start()} can restart the playback from the 311 * beginning of the audio/video source.</li> 312 * </ul> 313 * 314 * 315 * <a name="Valid_and_Invalid_States"></a> 316 * <h3>Valid and invalid states</h3> 317 * 318 * <table border="0" cellspacing="0" cellpadding="0"> 319 * <tr><td>Method Name </p></td> 320 * <td>Valid States </p></td> 321 * <td>Invalid States </p></td> 322 * <td>Comments </p></td></tr> 323 * <tr><td>attachAuxEffect </p></td> 324 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td> 325 * <td>{Idle, Error} </p></td> 326 * <td>This method must be called after setDataSource. 327 * Calling it does not change the object state. </p></td></tr> 328 * <tr><td>getAudioSessionId </p></td> 329 * <td>any </p></td> 330 * <td>{} </p></td> 331 * <td>This method can be called in any state and calling it does not change 332 * the object state. </p></td></tr> 333 * <tr><td>getCurrentPosition </p></td> 334 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 335 * PlaybackCompleted} </p></td> 336 * <td>{Error}</p></td> 337 * <td>Successful invoke of this method in a valid state does not change the 338 * state. Calling this method in an invalid state transfers the object 339 * to the <em>Error</em> state. </p></td></tr> 340 * <tr><td>getDuration </p></td> 341 * <td>{Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td> 342 * <td>{Idle, Initialized, Error} </p></td> 343 * <td>Successful invoke of this method in a valid state does not change the 344 * state. Calling this method in an invalid state transfers the object 345 * to the <em>Error</em> state.
</p></td></tr> 346 * <tr><td>getVideoHeight </p></td> 347 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 348 * PlaybackCompleted}</p></td> 349 * <td>{Error}</p></td> 350 * <td>Successful invoke of this method in a valid state does not change the 351 * state. Calling this method in an invalid state transfers the object 352 * to the <em>Error</em> state. </p></td></tr> 353 * <tr><td>getVideoWidth </p></td> 354 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 355 * PlaybackCompleted}</p></td> 356 * <td>{Error}</p></td> 357 * <td>Successful invoke of this method in a valid state does not change 358 * the state. Calling this method in an invalid state transfers the 359 * object to the <em>Error</em> state. </p></td></tr> 360 * <tr><td>isPlaying </p></td> 361 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 362 * PlaybackCompleted}</p></td> 363 * <td>{Error}</p></td> 364 * <td>Successful invoke of this method in a valid state does not change 365 * the state. Calling this method in an invalid state transfers the 366 * object to the <em>Error</em> state. </p></td></tr> 367 * <tr><td>pause </p></td> 368 * <td>{Started, Paused, PlaybackCompleted}</p></td> 369 * <td>{Idle, Initialized, Prepared, Stopped, Error}</p></td> 370 * <td>Successful invoke of this method in a valid state transfers the 371 * object to the <em>Paused</em> state. Calling this method in an 372 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> 373 * <tr><td>prepare </p></td> 374 * <td>{Initialized, Stopped} </p></td> 375 * <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td> 376 * <td>Successful invoke of this method in a valid state transfers the 377 * object to the <em>Prepared</em> state. 
Calling this method in an 378 * invalid state throws an IllegalStateException.</p></td></tr> 379 * <tr><td>prepareAsync </p></td> 380 * <td>{Initialized, Stopped} </p></td> 381 * <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td> 382 * <td>Successful invoke of this method in a valid state transfers the 383 * object to the <em>Preparing</em> state. Calling this method in an 384 * invalid state throws an IllegalStateException.</p></td></tr> 385 * <tr><td>release </p></td> 386 * <td>any </p></td> 387 * <td>{} </p></td> 388 * <td>After {@link #release()}, the object is no longer available. </p></td></tr> 389 * <tr><td>reset </p></td> 390 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 391 * PlaybackCompleted, Error}</p></td> 392 * <td>{}</p></td> 393 * <td>After {@link #reset()}, the object is like being just created.</p></td></tr> 394 * <tr><td>seekTo </p></td> 395 * <td>{Prepared, Started, Paused, PlaybackCompleted} </p></td> 396 * <td>{Idle, Initialized, Stopped, Error}</p></td> 397 * <td>Successful invoke of this method in a valid state does not change 398 * the state. Calling this method in an invalid state transfers the 399 * object to the <em>Error</em> state. </p></td></tr> 400 * <tr><td>setAudioAttributes </p></td> 401 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 402 * PlaybackCompleted}</p></td> 403 * <td>{Error}</p></td> 404 * <td>Successful invoke of this method does not change the state. In order for the 405 * target audio attributes type to become effective, this method must be called before 406 * prepare() or prepareAsync().</p></td></tr> 407 * <tr><td>setAudioSessionId </p></td> 408 * <td>{Idle} </p></td> 409 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted, 410 * Error} </p></td> 411 * <td>This method must be called in idle state as the audio session ID must be known before 412 * calling setDataSource. Calling it does not change the object state. 
</p></td></tr> 413 * <tr><td>setAudioStreamType </p></td> 414 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 415 * PlaybackCompleted}</p></td> 416 * <td>{Error}</p></td> 417 * <td>Successful invoke of this method does not change the state. In order for the 418 * target audio stream type to become effective, this method must be called before 419 * prepare() or prepareAsync().</p></td></tr> 420 * <tr><td>setAuxEffectSendLevel </p></td> 421 * <td>any</p></td> 422 * <td>{} </p></td> 423 * <td>Calling this method does not change the object state. </p></td></tr> 424 * <tr><td>setDataSource </p></td> 425 * <td>{Idle} </p></td> 426 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted, 427 * Error} </p></td> 428 * <td>Successful invoke of this method in a valid state transfers the 429 * object to the <em>Initialized</em> state. Calling this method in an 430 * invalid state throws an IllegalStateException.</p></td></tr> 431 * <tr><td>setDisplay </p></td> 432 * <td>any </p></td> 433 * <td>{} </p></td> 434 * <td>This method can be called in any state and calling it does not change 435 * the object state. </p></td></tr> 436 * <tr><td>setSurface </p></td> 437 * <td>any </p></td> 438 * <td>{} </p></td> 439 * <td>This method can be called in any state and calling it does not change 440 * the object state. </p></td></tr> 441 * <tr><td>setVideoScalingMode </p></td> 442 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td> 443 * <td>{Idle, Error}</p></td> 444 * <td>Successful invoke of this method does not change the state.</p></td></tr> 445 * <tr><td>setLooping </p></td> 446 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 447 * PlaybackCompleted}</p></td> 448 * <td>{Error}</p></td> 449 * <td>Successful invoke of this method in a valid state does not change 450 * the state. 
Calling this method in an 451 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> 452 * <tr><td>isLooping </p></td> 453 * <td>any </p></td> 454 * <td>{} </p></td> 455 * <td>This method can be called in any state and calling it does not change 456 * the object state. </p></td></tr> 457 * <tr><td>setOnBufferingUpdateListener </p></td> 458 * <td>any </p></td> 459 * <td>{} </p></td> 460 * <td>This method can be called in any state and calling it does not change 461 * the object state. </p></td></tr> 462 * <tr><td>setOnCompletionListener </p></td> 463 * <td>any </p></td> 464 * <td>{} </p></td> 465 * <td>This method can be called in any state and calling it does not change 466 * the object state. </p></td></tr> 467 * <tr><td>setOnErrorListener </p></td> 468 * <td>any </p></td> 469 * <td>{} </p></td> 470 * <td>This method can be called in any state and calling it does not change 471 * the object state. </p></td></tr> 472 * <tr><td>setOnPreparedListener </p></td> 473 * <td>any </p></td> 474 * <td>{} </p></td> 475 * <td>This method can be called in any state and calling it does not change 476 * the object state. </p></td></tr> 477 * <tr><td>setOnSeekCompleteListener </p></td> 478 * <td>any </p></td> 479 * <td>{} </p></td> 480 * <td>This method can be called in any state and calling it does not change 481 * the object state. </p></td></tr> 482 * <tr><td>setPlaybackRate</p></td> 483 * <td>any </p></td> 484 * <td>{} </p></td> 485 * <td>This method can be called in any state and calling it does not change 486 * the object state. </p></td></tr> 487 * <tr><td>setPlaybackParams</p></td> 488 * <td>any </p></td> 489 * <td>{} </p></td> 490 * <td>This method can be called in any state and calling it does not change 491 * the object state. </p></td></tr> 492 * <tr><td>setScreenOnWhilePlaying</></td> 493 * <td>any </p></td> 494 * <td>{} </p></td> 495 * <td>This method can be called in any state and calling it does not change 496 * the object state. 
</p></td></tr> 497 * <tr><td>setVolume </p></td> 498 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 499 * PlaybackCompleted}</p></td> 500 * <td>{Error}</p></td> 501 * <td>Successful invoke of this method does not change the state. 502 * <tr><td>setWakeMode </p></td> 503 * <td>any </p></td> 504 * <td>{} </p></td> 505 * <td>This method can be called in any state and calling it does not change 506 * the object state.</p></td></tr> 507 * <tr><td>start </p></td> 508 * <td>{Prepared, Started, Paused, PlaybackCompleted}</p></td> 509 * <td>{Idle, Initialized, Stopped, Error}</p></td> 510 * <td>Successful invoke of this method in a valid state transfers the 511 * object to the <em>Started</em> state. Calling this method in an 512 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> 513 * <tr><td>stop </p></td> 514 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 515 * <td>{Idle, Initialized, Error}</p></td> 516 * <td>Successful invoke of this method in a valid state transfers the 517 * object to the <em>Stopped</em> state. 
Calling this method in an 518 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> 519 * <tr><td>getTrackInfo </p></td> 520 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 521 * <td>{Idle, Initialized, Error}</p></td> 522 * <td>Successful invoke of this method does not change the state.</p></td></tr> 523 * <tr><td>addTimedTextSource </p></td> 524 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 525 * <td>{Idle, Initialized, Error}</p></td> 526 * <td>Successful invoke of this method does not change the state.</p></td></tr> 527 * <tr><td>selectTrack </p></td> 528 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 529 * <td>{Idle, Initialized, Error}</p></td> 530 * <td>Successful invoke of this method does not change the state.</p></td></tr> 531 * <tr><td>deselectTrack </p></td> 532 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 533 * <td>{Idle, Initialized, Error}</p></td> 534 * <td>Successful invoke of this method does not change the state.</p></td></tr> 535 * 536 * </table> 537 * 538 * <a name="Permissions"></a> 539 * <h3>Permissions</h3> 540 * <p>One may need to declare a corresponding WAKE_LOCK permission {@link 541 * android.R.styleable#AndroidManifestUsesPermission <uses-permission>} 542 * element. 543 * 544 * <p>This class requires the {@link android.Manifest.permission#INTERNET} permission 545 * when used with network-based content. 546 * 547 * <a name="Callbacks"></a> 548 * <h3>Callbacks</h3> 549 * <p>Applications may want to register for informational and error 550 * events in order to be informed of some internal state update and 551 * possible runtime errors during playback or streaming. 
Registration for 552 * these events is done by properly setting the appropriate listeners (via calls 553 * to 554 * {@link #setOnPreparedListener(OnPreparedListener)}setOnPreparedListener, 555 * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}setOnVideoSizeChangedListener, 556 * {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}setOnSeekCompleteListener, 557 * {@link #setOnCompletionListener(OnCompletionListener)}setOnCompletionListener, 558 * {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}setOnBufferingUpdateListener, 559 * {@link #setOnInfoListener(OnInfoListener)}setOnInfoListener, 560 * {@link #setOnErrorListener(OnErrorListener)}setOnErrorListener, etc). 561 * In order to receive the respective callback 562 * associated with these listeners, applications are required to create 563 * MediaPlayer objects on a thread with its own Looper running (main UI 564 * thread by default has a Looper running). 565 * 566 */ 567public class MediaPlayer implements SubtitleController.Listener 568{ 569 /** 570 Constant to retrieve only the new metadata since the last 571 call. 572 // FIXME: unhide. 573 // FIXME: add link to getMetadata(boolean, boolean) 574 {@hide} 575 */ 576 public static final boolean METADATA_UPDATE_ONLY = true; 577 578 /** 579 Constant to retrieve all the metadata. 580 // FIXME: unhide. 581 // FIXME: add link to getMetadata(boolean, boolean) 582 {@hide} 583 */ 584 public static final boolean METADATA_ALL = false; 585 586 /** 587 Constant to enable the metadata filter during retrieval. 588 // FIXME: unhide. 589 // FIXME: add link to getMetadata(boolean, boolean) 590 {@hide} 591 */ 592 public static final boolean APPLY_METADATA_FILTER = true; 593 594 /** 595 Constant to disable the metadata filter during retrieval. 596 // FIXME: unhide. 
597 // FIXME: add link to getMetadata(boolean, boolean) 598 {@hide} 599 */ 600 public static final boolean BYPASS_METADATA_FILTER = false; 601 602 static { 603 System.loadLibrary("media_jni"); 604 native_init(); 605 } 606 607 private final static String TAG = "MediaPlayer"; 608 // Name of the remote interface for the media player. Must be kept 609 // in sync with the 2nd parameter of the IMPLEMENT_META_INTERFACE 610 // macro invocation in IMediaPlayer.cpp 611 private final static String IMEDIA_PLAYER = "android.media.IMediaPlayer"; 612 613 private long mNativeContext; // accessed by native methods 614 private long mNativeSurfaceTexture; // accessed by native methods 615 private int mListenerContext; // accessed by native methods 616 private SurfaceHolder mSurfaceHolder; 617 private EventHandler mEventHandler; 618 private PowerManager.WakeLock mWakeLock = null; 619 private boolean mScreenOnWhilePlaying; 620 private boolean mStayAwake; 621 private final IAppOpsService mAppOps; 622 private int mStreamType = AudioManager.USE_DEFAULT_STREAM_TYPE; 623 private int mUsage = -1; 624 private boolean mBypassInterruptionPolicy; 625 626 /** 627 * Default constructor. Consider using one of the create() methods for 628 * synchronously instantiating a MediaPlayer from a Uri or resource. 629 * <p>When done with the MediaPlayer, you should call {@link #release()}, 630 * to free the resources. 
If not released, too many MediaPlayer instances may 631 * result in an exception.</p> 632 */ 633 public MediaPlayer() { 634 635 Looper looper; 636 if ((looper = Looper.myLooper()) != null) { 637 mEventHandler = new EventHandler(this, looper); 638 } else if ((looper = Looper.getMainLooper()) != null) { 639 mEventHandler = new EventHandler(this, looper); 640 } else { 641 mEventHandler = null; 642 } 643 644 mTimeProvider = new TimeProvider(this); 645 mOpenSubtitleSources = new Vector<InputStream>(); 646 IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE); 647 mAppOps = IAppOpsService.Stub.asInterface(b); 648 649 /* Native setup requires a weak reference to our object. 650 * It's easier to create it here than in C++. 651 */ 652 native_setup(new WeakReference<MediaPlayer>(this)); 653 } 654 655 /* 656 * Update the MediaPlayer SurfaceTexture. 657 * Call after setting a new display surface. 658 */ 659 private native void _setVideoSurface(Surface surface); 660 661 /* Do not change these values (starting with INVOKE_ID) without updating 662 * their counterparts in include/media/mediaplayer.h! 663 */ 664 private static final int INVOKE_ID_GET_TRACK_INFO = 1; 665 private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE = 2; 666 private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE_FD = 3; 667 private static final int INVOKE_ID_SELECT_TRACK = 4; 668 private static final int INVOKE_ID_DESELECT_TRACK = 5; 669 private static final int INVOKE_ID_SET_VIDEO_SCALE_MODE = 6; 670 private static final int INVOKE_ID_GET_SELECTED_TRACK = 7; 671 672 /** 673 * Create a request parcel which can be routed to the native media 674 * player using {@link #invoke(Parcel, Parcel)}. The Parcel 675 * returned has the proper InterfaceToken set. The caller should 676 * not overwrite that token, i.e it can only append data to the 677 * Parcel. 678 * 679 * @return A parcel suitable to hold a request for the native 680 * player. 
681 * {@hide} 682 */ 683 public Parcel newRequest() { 684 Parcel parcel = Parcel.obtain(); 685 parcel.writeInterfaceToken(IMEDIA_PLAYER); 686 return parcel; 687 } 688 689 /** 690 * Invoke a generic method on the native player using opaque 691 * parcels for the request and reply. Both payloads' format is a 692 * convention between the java caller and the native player. 693 * Must be called after setDataSource to make sure a native player 694 * exists. On failure, a RuntimeException is thrown. 695 * 696 * @param request Parcel with the data for the extension. The 697 * caller must use {@link #newRequest()} to get one. 698 * 699 * @param reply Output parcel with the data returned by the 700 * native player. 701 * {@hide} 702 */ 703 public void invoke(Parcel request, Parcel reply) { 704 int retcode = native_invoke(request, reply); 705 reply.setDataPosition(0); 706 if (retcode != 0) { 707 throw new RuntimeException("failure code: " + retcode); 708 } 709 } 710 711 /** 712 * Sets the {@link SurfaceHolder} to use for displaying the video 713 * portion of the media. 714 * 715 * Either a surface holder or surface must be set if a display or video sink 716 * is needed. Not calling this method or {@link #setSurface(Surface)} 717 * when playing back a video will result in only the audio track being played. 718 * A null surface holder or surface will result in only the audio track being 719 * played. 720 * 721 * @param sh the SurfaceHolder to use for video display 722 */ 723 public void setDisplay(SurfaceHolder sh) { 724 mSurfaceHolder = sh; 725 Surface surface; 726 if (sh != null) { 727 surface = sh.getSurface(); 728 } else { 729 surface = null; 730 } 731 _setVideoSurface(surface); 732 updateSurfaceScreenOn(); 733 } 734 735 /** 736 * Sets the {@link Surface} to be used as the sink for the video portion of 737 * the media. This is similar to {@link #setDisplay(SurfaceHolder)}, but 738 * does not support {@link #setScreenOnWhilePlaying(boolean)}. 
Setting a 739 * Surface will un-set any Surface or SurfaceHolder that was previously set. 740 * A null surface will result in only the audio track being played. 741 * 742 * If the Surface sends frames to a {@link SurfaceTexture}, the timestamps 743 * returned from {@link SurfaceTexture#getTimestamp()} will have an 744 * unspecified zero point. These timestamps cannot be directly compared 745 * between different media sources, different instances of the same media 746 * source, or multiple runs of the same program. The timestamp is normally 747 * monotonically increasing and is unaffected by time-of-day adjustments, 748 * but it is reset when the position is set. 749 * 750 * @param surface The {@link Surface} to be used for the video portion of 751 * the media. 752 */ 753 public void setSurface(Surface surface) { 754 if (mScreenOnWhilePlaying && surface != null) { 755 Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective for Surface"); 756 } 757 mSurfaceHolder = null; 758 _setVideoSurface(surface); 759 updateSurfaceScreenOn(); 760 } 761 762 /* Do not change these video scaling mode values below without updating 763 * their counterparts in system/window.h! Please do not forget to update 764 * {@link #isVideoScalingModeSupported} when new video scaling modes 765 * are added. 766 */ 767 /** 768 * Specifies a video scaling mode. The content is stretched to the 769 * surface rendering area. When the surface has the same aspect ratio 770 * as the content, the aspect ratio of the content is maintained; 771 * otherwise, the aspect ratio of the content is not maintained when video 772 * is being rendered. Unlike {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}, 773 * there is no content cropping with this video scaling mode. 774 */ 775 public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT = 1; 776 777 /** 778 * Specifies a video scaling mode. The content is scaled, maintaining 779 * its aspect ratio. The whole surface area is always used. 
When the 780 * aspect ratio of the content is the same as the surface, no content 781 * is cropped; otherwise, content is cropped to fit the surface. 782 */ 783 public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2; 784 /** 785 * Sets video scaling mode. To make the target video scaling mode 786 * effective during playback, this method must be called after 787 * data source is set. If not called, the default video 788 * scaling mode is {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT}. 789 * 790 * <p> The supported video scaling modes are: 791 * <ul> 792 * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT} 793 * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING} 794 * </ul> 795 * 796 * @param mode target video scaling mode. Most be one of the supported 797 * video scaling modes; otherwise, IllegalArgumentException will be thrown. 798 * 799 * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT 800 * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING 801 */ 802 public void setVideoScalingMode(int mode) { 803 if (!isVideoScalingModeSupported(mode)) { 804 final String msg = "Scaling mode " + mode + " is not supported"; 805 throw new IllegalArgumentException(msg); 806 } 807 Parcel request = Parcel.obtain(); 808 Parcel reply = Parcel.obtain(); 809 try { 810 request.writeInterfaceToken(IMEDIA_PLAYER); 811 request.writeInt(INVOKE_ID_SET_VIDEO_SCALE_MODE); 812 request.writeInt(mode); 813 invoke(request, reply); 814 } finally { 815 request.recycle(); 816 reply.recycle(); 817 } 818 } 819 820 /** 821 * Convenience method to create a MediaPlayer for a given Uri. 822 * On success, {@link #prepare()} will already have been called and must not be called again. 823 * <p>When done with the MediaPlayer, you should call {@link #release()}, 824 * to free the resources. 
If not released, too many MediaPlayer instances will 825 * result in an exception.</p> 826 * <p>Note that since {@link #prepare()} is called automatically in this method, 827 * you cannot change the audio stream type (see {@link #setAudioStreamType(int)}), audio 828 * session ID (see {@link #setAudioSessionId(int)}) or audio attributes 829 * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p> 830 * 831 * @param context the Context to use 832 * @param uri the Uri from which to get the datasource 833 * @return a MediaPlayer object, or null if creation failed 834 */ 835 public static MediaPlayer create(Context context, Uri uri) { 836 return create (context, uri, null); 837 } 838 839 /** 840 * Convenience method to create a MediaPlayer for a given Uri. 841 * On success, {@link #prepare()} will already have been called and must not be called again. 842 * <p>When done with the MediaPlayer, you should call {@link #release()}, 843 * to free the resources. If not released, too many MediaPlayer instances will 844 * result in an exception.</p> 845 * <p>Note that since {@link #prepare()} is called automatically in this method, 846 * you cannot change the audio stream type (see {@link #setAudioStreamType(int)}), audio 847 * session ID (see {@link #setAudioSessionId(int)}) or audio attributes 848 * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p> 849 * 850 * @param context the Context to use 851 * @param uri the Uri from which to get the datasource 852 * @param holder the SurfaceHolder to use for displaying the video 853 * @return a MediaPlayer object, or null if creation failed 854 */ 855 public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder) { 856 int s = AudioSystem.newAudioSessionId(); 857 return create(context, uri, holder, null, s > 0 ? 
s : 0); 858 } 859 860 /** 861 * Same factory method as {@link #create(Context, Uri, SurfaceHolder)} but that lets you specify 862 * the audio attributes and session ID to be used by the new MediaPlayer instance. 863 * @param context the Context to use 864 * @param uri the Uri from which to get the datasource 865 * @param holder the SurfaceHolder to use for displaying the video, may be null. 866 * @param audioAttributes the {@link AudioAttributes} to be used by the media player. 867 * @param audioSessionId the audio session ID to be used by the media player, 868 * see {@link AudioManager#generateAudioSessionId()} to obtain a new session. 869 * @return a MediaPlayer object, or null if creation failed 870 */ 871 public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder, 872 AudioAttributes audioAttributes, int audioSessionId) { 873 874 try { 875 MediaPlayer mp = new MediaPlayer(); 876 final AudioAttributes aa = audioAttributes != null ? audioAttributes : 877 new AudioAttributes.Builder().build(); 878 mp.setAudioAttributes(aa); 879 mp.setAudioSessionId(audioSessionId); 880 mp.setDataSource(context, uri); 881 if (holder != null) { 882 mp.setDisplay(holder); 883 } 884 mp.prepare(); 885 return mp; 886 } catch (IOException ex) { 887 Log.d(TAG, "create failed:", ex); 888 // fall through 889 } catch (IllegalArgumentException ex) { 890 Log.d(TAG, "create failed:", ex); 891 // fall through 892 } catch (SecurityException ex) { 893 Log.d(TAG, "create failed:", ex); 894 // fall through 895 } 896 897 return null; 898 } 899 900 // Note no convenience method to create a MediaPlayer with SurfaceTexture sink. 901 902 /** 903 * Convenience method to create a MediaPlayer for a given resource id. 904 * On success, {@link #prepare()} will already have been called and must not be called again. 905 * <p>When done with the MediaPlayer, you should call {@link #release()}, 906 * to free the resources. 
If not released, too many MediaPlayer instances will 907 * result in an exception.</p> 908 * <p>Note that since {@link #prepare()} is called automatically in this method, 909 * you cannot change the audio stream type (see {@link #setAudioStreamType(int)}), audio 910 * session ID (see {@link #setAudioSessionId(int)}) or audio attributes 911 * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p> 912 * 913 * @param context the Context to use 914 * @param resid the raw resource id (<var>R.raw.<something></var>) for 915 * the resource to use as the datasource 916 * @return a MediaPlayer object, or null if creation failed 917 */ 918 public static MediaPlayer create(Context context, int resid) { 919 int s = AudioSystem.newAudioSessionId(); 920 return create(context, resid, null, s > 0 ? s : 0); 921 } 922 923 /** 924 * Same factory method as {@link #create(Context, int)} but that lets you specify the audio 925 * attributes and session ID to be used by the new MediaPlayer instance. 926 * @param context the Context to use 927 * @param resid the raw resource id (<var>R.raw.<something></var>) for 928 * the resource to use as the datasource 929 * @param audioAttributes the {@link AudioAttributes} to be used by the media player. 930 * @param audioSessionId the audio session ID to be used by the media player, 931 * see {@link AudioManager#generateAudioSessionId()} to obtain a new session. 932 * @return a MediaPlayer object, or null if creation failed 933 */ 934 public static MediaPlayer create(Context context, int resid, 935 AudioAttributes audioAttributes, int audioSessionId) { 936 try { 937 AssetFileDescriptor afd = context.getResources().openRawResourceFd(resid); 938 if (afd == null) return null; 939 940 MediaPlayer mp = new MediaPlayer(); 941 942 final AudioAttributes aa = audioAttributes != null ? 
audioAttributes : 943 new AudioAttributes.Builder().build(); 944 mp.setAudioAttributes(aa); 945 mp.setAudioSessionId(audioSessionId); 946 947 mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength()); 948 afd.close(); 949 mp.prepare(); 950 return mp; 951 } catch (IOException ex) { 952 Log.d(TAG, "create failed:", ex); 953 // fall through 954 } catch (IllegalArgumentException ex) { 955 Log.d(TAG, "create failed:", ex); 956 // fall through 957 } catch (SecurityException ex) { 958 Log.d(TAG, "create failed:", ex); 959 // fall through 960 } 961 return null; 962 } 963 964 /** 965 * Sets the data source as a content Uri. 966 * 967 * @param context the Context to use when resolving the Uri 968 * @param uri the Content URI of the data you want to play 969 * @throws IllegalStateException if it is called in an invalid state 970 */ 971 public void setDataSource(Context context, Uri uri) 972 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException { 973 setDataSource(context, uri, null); 974 } 975 976 /** 977 * Sets the data source as a content Uri. 978 * 979 * @param context the Context to use when resolving the Uri 980 * @param uri the Content URI of the data you want to play 981 * @param headers the headers to be sent together with the request for the data 982 * Note that the cross domain redirection is allowed by default, but that can be 983 * changed with key/value pairs through the headers parameter with 984 * "android-allow-cross-domain-redirect" as the key and "0" or "1" as the value 985 * to disallow or allow cross domain redirection. 
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void setDataSource(Context context, Uri uri, Map<String, String> headers)
        throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
        final String scheme = uri.getScheme();
        // file:// URIs can be opened directly via their path.
        if (ContentResolver.SCHEME_FILE.equals(scheme)) {
            setDataSource(uri.getPath());
            return;
        } else if (ContentResolver.SCHEME_CONTENT.equals(scheme)
                && Settings.AUTHORITY.equals(uri.getAuthority())) {
            // Redirect ringtones to go directly to underlying provider
            uri = RingtoneManager.getActualDefaultRingtoneUri(context,
                    RingtoneManager.getDefaultType(uri));
            if (uri == null) {
                throw new FileNotFoundException("Failed to resolve default ringtone");
            }
        }

        // First try to open the Uri locally through the ContentResolver and
        // hand the resulting file descriptor to the player.
        AssetFileDescriptor fd = null;
        try {
            ContentResolver resolver = context.getContentResolver();
            fd = resolver.openAssetFileDescriptor(uri, "r");
            if (fd == null) {
                return;
            }
            // Note: using getDeclaredLength so that our behavior is the same
            // as previous versions when the content provider is returning
            // a full file.
            if (fd.getDeclaredLength() < 0) {
                setDataSource(fd.getFileDescriptor());
            } else {
                setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getDeclaredLength());
            }
            return;
        } catch (SecurityException ex) {
            // Deliberately swallowed: fall through to the server-side open below.
        } catch (IOException ex) {
            // Deliberately swallowed: fall through to the server-side open below.
        } finally {
            if (fd != null) {
                fd.close();
            }
        }

        Log.d(TAG, "Couldn't open file on client side, trying server side");

        // Client-side open failed; pass the raw Uri (and headers) so the
        // media server can open it remotely.
        setDataSource(uri.toString(), headers);
    }

    /**
     * Sets the data source (file-path or http/rtsp URL) to use.
1035 * 1036 * @param path the path of the file, or the http/rtsp URL of the stream you want to play 1037 * @throws IllegalStateException if it is called in an invalid state 1038 * 1039 * <p>When <code>path</code> refers to a local file, the file may actually be opened by a 1040 * process other than the calling application. This implies that the pathname 1041 * should be an absolute path (as any other process runs with unspecified current working 1042 * directory), and that the pathname should reference a world-readable file. 1043 * As an alternative, the application could first open the file for reading, 1044 * and then use the file descriptor form {@link #setDataSource(FileDescriptor)}. 1045 */ 1046 public void setDataSource(String path) 1047 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException { 1048 setDataSource(path, null, null); 1049 } 1050 1051 /** 1052 * Sets the data source (file-path or http/rtsp URL) to use. 1053 * 1054 * @param path the path of the file, or the http/rtsp URL of the stream you want to play 1055 * @param headers the headers associated with the http request for the stream you want to play 1056 * @throws IllegalStateException if it is called in an invalid state 1057 * @hide pending API council 1058 */ 1059 public void setDataSource(String path, Map<String, String> headers) 1060 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException 1061 { 1062 String[] keys = null; 1063 String[] values = null; 1064 1065 if (headers != null) { 1066 keys = new String[headers.size()]; 1067 values = new String[headers.size()]; 1068 1069 int i = 0; 1070 for (Map.Entry<String, String> entry: headers.entrySet()) { 1071 keys[i] = entry.getKey(); 1072 values[i] = entry.getValue(); 1073 ++i; 1074 } 1075 } 1076 setDataSource(path, keys, values); 1077 } 1078 1079 private void setDataSource(String path, String[] keys, String[] values) 1080 throws IOException, IllegalArgumentException, SecurityException, 
IllegalStateException { 1081 final Uri uri = Uri.parse(path); 1082 final String scheme = uri.getScheme(); 1083 if ("file".equals(scheme)) { 1084 path = uri.getPath(); 1085 } else if (scheme != null) { 1086 // handle non-file sources 1087 nativeSetDataSource( 1088 MediaHTTPService.createHttpServiceBinderIfNecessary(path), 1089 path, 1090 keys, 1091 values); 1092 return; 1093 } 1094 1095 final File file = new File(path); 1096 if (file.exists()) { 1097 FileInputStream is = new FileInputStream(file); 1098 FileDescriptor fd = is.getFD(); 1099 setDataSource(fd); 1100 is.close(); 1101 } else { 1102 throw new IOException("setDataSource failed."); 1103 } 1104 } 1105 1106 private native void nativeSetDataSource( 1107 IBinder httpServiceBinder, String path, String[] keys, String[] values) 1108 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException; 1109 1110 /** 1111 * Sets the data source (FileDescriptor) to use. It is the caller's responsibility 1112 * to close the file descriptor. It is safe to do so as soon as this call returns. 1113 * 1114 * @param fd the FileDescriptor for the file you want to play 1115 * @throws IllegalStateException if it is called in an invalid state 1116 */ 1117 public void setDataSource(FileDescriptor fd) 1118 throws IOException, IllegalArgumentException, IllegalStateException { 1119 // intentionally less than LONG_MAX 1120 setDataSource(fd, 0, 0x7ffffffffffffffL); 1121 } 1122 1123 /** 1124 * Sets the data source (FileDescriptor) to use. The FileDescriptor must be 1125 * seekable (N.B. a LocalSocket is not seekable). It is the caller's responsibility 1126 * to close the file descriptor. It is safe to do so as soon as this call returns. 
     *
     * @param fd the FileDescriptor for the file you want to play
     * @param offset the offset into the file where the data to be played starts, in bytes
     * @param length the length in bytes of the data to be played
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void setDataSource(FileDescriptor fd, long offset, long length)
            throws IOException, IllegalArgumentException, IllegalStateException {
        _setDataSource(fd, offset, length);
    }

    private native void _setDataSource(FileDescriptor fd, long offset, long length)
            throws IOException, IllegalArgumentException, IllegalStateException;

    /**
     * Sets the data source (MediaDataSource) to use.
     *
     * @param dataSource the MediaDataSource for the media you want to play
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void setDataSource(MediaDataSource dataSource)
            throws IllegalArgumentException, IllegalStateException {
        _setDataSource(dataSource);
    }

    private native void _setDataSource(MediaDataSource dataSource)
          throws IllegalArgumentException, IllegalStateException;

    /**
     * Prepares the player for playback, synchronously.
     *
     * After setting the datasource and the display surface, you need to either
     * call prepare() or prepareAsync(). For files, it is OK to call prepare(),
     * which blocks until MediaPlayer is ready for playback.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void prepare() throws IOException, IllegalStateException {
        _prepare();
        // Presumably in-container subtitle tracks are only discoverable after
        // native prepare completes — confirm against scanInternalSubtitleTracks.
        scanInternalSubtitleTracks();
    }

    private native void _prepare() throws IOException, IllegalStateException;

    /**
     * Prepares the player for playback, asynchronously.
     *
     * After setting the datasource and the display surface, you need to either
     * call prepare() or prepareAsync().
For streams, you should call prepareAsync(),
     * which returns immediately, rather than blocking until enough data has been
     * buffered.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public native void prepareAsync() throws IllegalStateException;

    /**
     * Starts or resumes playback. If playback had previously been paused,
     * playback will continue from where it was paused. If playback had
     * been stopped, or never started before, playback will start at the
     * beginning.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void start() throws IllegalStateException {
        // When the app-ops policy denies audible playback, play muted
        // rather than failing.
        if (isRestricted()) {
            _setVolume(0, 0);
        }
        stayAwake(true);
        _start();
    }

    private native void _start() throws IllegalStateException;

    // Returns true when the OP_PLAY_AUDIO app-op disallows audible playback
    // for this uid/package and audio usage. Never restricted when the player
    // was configured to bypass the audio interruption policy.
    private boolean isRestricted() {
        if (mBypassInterruptionPolicy) {
            return false;
        }
        try {
            final int usage = mUsage != -1 ? mUsage
                    : AudioAttributes.usageForLegacyStreamType(getAudioStreamType());
            final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO, usage,
                    Process.myUid(), ActivityThread.currentPackageName());
            return mode != AppOpsManager.MODE_ALLOWED;
        } catch (RemoteException e) {
            // App-ops service unreachable: err on the side of allowing playback.
            return false;
        }
    }

    // Lazily resolves the legacy stream type from the native player on first
    // use; the result is cached in mStreamType.
    private int getAudioStreamType() {
        if (mStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
            mStreamType = _getAudioStreamType();
        }
        return mStreamType;
    }

    private native int _getAudioStreamType() throws IllegalStateException;

    /**
     * Stops playback after playback has been stopped or paused.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
1230 */ 1231 public void stop() throws IllegalStateException { 1232 stayAwake(false); 1233 _stop(); 1234 } 1235 1236 private native void _stop() throws IllegalStateException; 1237 1238 /** 1239 * Pauses playback. Call start() to resume. 1240 * 1241 * @throws IllegalStateException if the internal player engine has not been 1242 * initialized. 1243 */ 1244 public void pause() throws IllegalStateException { 1245 stayAwake(false); 1246 _pause(); 1247 } 1248 1249 private native void _pause() throws IllegalStateException; 1250 1251 /** 1252 * Set the low-level power management behavior for this MediaPlayer. This 1253 * can be used when the MediaPlayer is not playing through a SurfaceHolder 1254 * set with {@link #setDisplay(SurfaceHolder)} and thus can use the 1255 * high-level {@link #setScreenOnWhilePlaying(boolean)} feature. 1256 * 1257 * <p>This function has the MediaPlayer access the low-level power manager 1258 * service to control the device's power usage while playing is occurring. 1259 * The parameter is a combination of {@link android.os.PowerManager} wake flags. 1260 * Use of this method requires {@link android.Manifest.permission#WAKE_LOCK} 1261 * permission. 1262 * By default, no attempt is made to keep the device awake during playback. 
1263 * 1264 * @param context the Context to use 1265 * @param mode the power/wake mode to set 1266 * @see android.os.PowerManager 1267 */ 1268 public void setWakeMode(Context context, int mode) { 1269 boolean washeld = false; 1270 if (mWakeLock != null) { 1271 if (mWakeLock.isHeld()) { 1272 washeld = true; 1273 mWakeLock.release(); 1274 } 1275 mWakeLock = null; 1276 } 1277 1278 PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE); 1279 mWakeLock = pm.newWakeLock(mode|PowerManager.ON_AFTER_RELEASE, MediaPlayer.class.getName()); 1280 mWakeLock.setReferenceCounted(false); 1281 if (washeld) { 1282 mWakeLock.acquire(); 1283 } 1284 } 1285 1286 /** 1287 * Control whether we should use the attached SurfaceHolder to keep the 1288 * screen on while video playback is occurring. This is the preferred 1289 * method over {@link #setWakeMode} where possible, since it doesn't 1290 * require that the application have permission for low-level wake lock 1291 * access. 1292 * 1293 * @param screenOn Supply true to keep the screen on, false to allow it 1294 * to turn off. 1295 */ 1296 public void setScreenOnWhilePlaying(boolean screenOn) { 1297 if (mScreenOnWhilePlaying != screenOn) { 1298 if (screenOn && mSurfaceHolder == null) { 1299 Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective without a SurfaceHolder"); 1300 } 1301 mScreenOnWhilePlaying = screenOn; 1302 updateSurfaceScreenOn(); 1303 } 1304 } 1305 1306 private void stayAwake(boolean awake) { 1307 if (mWakeLock != null) { 1308 if (awake && !mWakeLock.isHeld()) { 1309 mWakeLock.acquire(); 1310 } else if (!awake && mWakeLock.isHeld()) { 1311 mWakeLock.release(); 1312 } 1313 } 1314 mStayAwake = awake; 1315 updateSurfaceScreenOn(); 1316 } 1317 1318 private void updateSurfaceScreenOn() { 1319 if (mSurfaceHolder != null) { 1320 mSurfaceHolder.setKeepScreenOn(mScreenOnWhilePlaying && mStayAwake); 1321 } 1322 } 1323 1324 /** 1325 * Returns the width of the video. 
     *
     * @return the width of the video, or 0 if there is no video,
     * no display surface was set, or the width has not been determined
     * yet. The OnVideoSizeChangedListener can be registered via
     * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}
     * to provide a notification when the width is available.
     */
    public native int getVideoWidth();

    /**
     * Returns the height of the video.
     *
     * @return the height of the video, or 0 if there is no video,
     * no display surface was set, or the height has not been determined
     * yet. The OnVideoSizeChangedListener can be registered via
     * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}
     * to provide a notification when the height is available.
     */
    public native int getVideoHeight();

    /**
     * Checks whether the MediaPlayer is playing.
     *
     * @return true if currently playing, false otherwise
     * @throws IllegalStateException if the internal player engine has not been
     * initialized or has been released.
     */
    public native boolean isPlaying();

    /**
     * Change playback speed of audio by resampling the audio.
     * <p>
     * Specifies resampling as audio mode for variable rate playback, i.e.,
     * resample the waveform based on the requested playback rate to get
     * a new waveform, and play back the new waveform at the original sampling
     * frequency.
     * When rate is larger than 1.0, pitch becomes higher.
     * When rate is smaller than 1.0, pitch becomes lower.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;

    /**
     * Change playback speed of audio without changing its pitch.
     * <p>
     * Specifies time stretching as audio mode for variable rate playback.
     * Time stretching changes the duration of the audio samples without
     * affecting its pitch.
     * <p>
     * This mode is only supported for a limited range of playback speed factors,
     * e.g. between 1/2x and 2x.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;

    /**
     * Change playback speed of audio without changing its pitch, and
     * possibly mute audio if time stretching is not supported for the playback
     * speed.
     * <p>
     * Try to keep audio pitch when changing the playback rate, but allow the
     * system to determine how to change audio playback if the rate is out
     * of range.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;

    /** @hide */
    // Compile-time restriction of the audioMode argument accepted by
    // easyPlaybackParams to the three constants above.
    @IntDef(
        value = {
            PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
            PLAYBACK_RATE_AUDIO_MODE_STRETCH,
            PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface PlaybackRateAudioMode {}

    /**
     * Sets playback rate and audio mode.
     *
     * @param rate the ratio between desired playback rate and normal one.
     * @param audioMode audio playback mode. Must be one of the supported
     * audio modes.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     * @throws IllegalArgumentException if audioMode is not supported.
1416 * 1417 * @hide 1418 */ 1419 @NonNull 1420 public PlaybackParams easyPlaybackParams(float rate, @PlaybackRateAudioMode int audioMode) { 1421 PlaybackParams params = new PlaybackParams(); 1422 params.allowDefaults(); 1423 switch (audioMode) { 1424 case PLAYBACK_RATE_AUDIO_MODE_DEFAULT: 1425 params.setSpeed(rate).setPitch(1.0f); 1426 break; 1427 case PLAYBACK_RATE_AUDIO_MODE_STRETCH: 1428 params.setSpeed(rate).setPitch(1.0f) 1429 .setAudioFallbackMode(params.AUDIO_FALLBACK_MODE_FAIL); 1430 break; 1431 case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE: 1432 params.setSpeed(rate).setPitch(rate); 1433 break; 1434 default: 1435 final String msg = "Audio playback mode " + audioMode + " is not supported"; 1436 throw new IllegalArgumentException(msg); 1437 } 1438 return params; 1439 } 1440 1441 /** 1442 * Sets playback rate using {@link PlaybackParams}. 1443 * 1444 * @param params the playback params. 1445 * 1446 * @throws IllegalStateException if the internal player engine has not been 1447 * initialized. 1448 * @throws IllegalArgumentException if params is not supported. 1449 */ 1450 public native void setPlaybackParams(@NonNull PlaybackParams params); 1451 1452 /** 1453 * Gets the playback params, containing the current playback rate. 1454 * 1455 * @return the playback params. 1456 * @throws IllegalStateException if the internal player engine has not been 1457 * initialized. 1458 */ 1459 @NonNull 1460 public native PlaybackParams getPlaybackParams(); 1461 1462 /** 1463 * Sets A/V sync mode. 1464 * 1465 * @param params the A/V sync params to apply 1466 * 1467 * @throws IllegalStateException if the internal player engine has not been 1468 * initialized. 1469 * @throws IllegalArgumentException if params are not supported. 1470 */ 1471 public native void setSyncParams(@NonNull SyncParams params); 1472 1473 /** 1474 * Gets the A/V sync mode. 
     *
     * @return the A/V sync params
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    @NonNull
    public native SyncParams getSyncParams();

    /**
     * Seeks to specified time position.
     *
     * @param msec the offset in milliseconds from the start to seek to
     * @throws IllegalStateException if the internal player engine has not been
     * initialized
     */
    public native void seekTo(int msec) throws IllegalStateException;

    /**
     * Get current playback position as a {@link MediaTimestamp}.
     * <p>
     * The MediaTimestamp represents how the media time correlates to the system time in
     * a linear fashion using an anchor and a clock rate. During regular playback, the media
     * time moves fairly constantly (though the anchor frame may be rebased to a current
     * system time, the linear correlation stays steady). Therefore, this method does not
     * need to be called often.
     * <p>
     * To help users get current playback position, this method always anchors the timestamp
     * to the current {@link System#nanoTime system time}, so
     * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
     *
     * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
     *         is available, e.g. because the media player has not been initialized.
     *
     * @see MediaTimestamp
     */
    @Nullable
    public MediaTimestamp getTimestamp()
    {
        try {
            // TODO: get the timestamp from native side
            return new MediaTimestamp(
                    getCurrentPosition() * 1000L, // position: milliseconds -> microseconds
                    System.nanoTime(),
                    isPlaying() ? getPlaybackParams().getSpeed() : 0.f); // 0 speed when paused
        } catch (IllegalStateException e) {
            // No initialized native player: per contract, report "no timestamp".
            return null;
        }
    }

    /**
     * Gets the current playback position.
     *
     * @return the current position in milliseconds
     */
    public native int getCurrentPosition();

    /**
     * Gets the duration of the file.
     *
     * @return the duration in milliseconds, if no duration is available
     *         (for example, if streaming live content), -1 is returned.
     */
    public native int getDuration();

    /**
     * Gets the media metadata.
     *
     * @param update_only controls whether the full set of available
     * metadata is returned or just the set that changed since the
     * last call. See {@see #METADATA_UPDATE_ONLY} and {@see
     * #METADATA_ALL}.
     *
     * @param apply_filter if true only metadata that matches the
     * filter is returned. See {@see #APPLY_METADATA_FILTER} and {@see
     * #BYPASS_METADATA_FILTER}.
     *
     * @return The metadata, possibly empty. null if an error occured.
     // FIXME: unhide.
     * {@hide}
     */
    public Metadata getMetadata(final boolean update_only,
                                final boolean apply_filter) {
        Parcel reply = Parcel.obtain();
        Metadata data = new Metadata();

        // The native side serializes metadata into `reply`. A false return
        // means the query failed, so we still own the parcel and must
        // recycle it.
        if (!native_getMetadata(update_only, apply_filter, reply)) {
            reply.recycle();
            return null;
        }

        // Metadata takes over the parcel, don't recycle it unless
        // there is an error.
        if (!data.parse(reply)) {
            reply.recycle();
            return null;
        }
        return data;
    }

    /**
     * Set a filter for the metadata update notification and update
     * retrieval. The caller provides 2 set of metadata keys, allowed
     * and blocked. The blocked set always takes precedence over the
     * allowed one.
     * Metadata.MATCH_ALL and Metadata.MATCH_NONE are 2 sets available as
     * shorthands to allow/block all or no metadata.
     *
     * By default, there is no filter set.
     *
     * @param allow Is the set of metadata the client is interested
     *              in receiving new notifications for.
1587 * @param block Is the set of metadata the client is not interested 1588 * in receiving new notifications for. 1589 * @return The call status code. 1590 * 1591 // FIXME: unhide. 1592 * {@hide} 1593 */ 1594 public int setMetadataFilter(Set<Integer> allow, Set<Integer> block) { 1595 // Do our serialization manually instead of calling 1596 // Parcel.writeArray since the sets are made of the same type 1597 // we avoid paying the price of calling writeValue (used by 1598 // writeArray) which burns an extra int per element to encode 1599 // the type. 1600 Parcel request = newRequest(); 1601 1602 // The parcel starts already with an interface token. There 1603 // are 2 filters. Each one starts with a 4bytes number to 1604 // store the len followed by a number of int (4 bytes as well) 1605 // representing the metadata type. 1606 int capacity = request.dataSize() + 4 * (1 + allow.size() + 1 + block.size()); 1607 1608 if (request.dataCapacity() < capacity) { 1609 request.setDataCapacity(capacity); 1610 } 1611 1612 request.writeInt(allow.size()); 1613 for(Integer t: allow) { 1614 request.writeInt(t); 1615 } 1616 request.writeInt(block.size()); 1617 for(Integer t: block) { 1618 request.writeInt(t); 1619 } 1620 return native_setMetadataFilter(request); 1621 } 1622 1623 /** 1624 * Set the MediaPlayer to start when this MediaPlayer finishes playback 1625 * (i.e. reaches the end of the stream). 1626 * The media framework will attempt to transition from this player to 1627 * the next as seamlessly as possible. The next player can be set at 1628 * any time before completion. The next player must be prepared by the 1629 * app, and the application should not call start() on it. 1630 * The next MediaPlayer must be different from 'this'. An exception 1631 * will be thrown if next == this. 1632 * The application may call setNextMediaPlayer(null) to indicate no 1633 * next player should be started at the end of playback. 
     * If the current player is looping, it will keep looping and the next
     * player will not be started.
     *
     * @param next the player to start after this one completes playback.
     *
     */
    public native void setNextMediaPlayer(MediaPlayer next);

    /**
     * Releases resources associated with this MediaPlayer object.
     * It is considered good practice to call this method when you're
     * done using the MediaPlayer. In particular, whenever an Activity
     * of an application is paused (its onPause() method is called),
     * or stopped (its onStop() method is called), this method should be
     * invoked to release the MediaPlayer object, unless the application
     * has a special need to keep the object around. In addition to
     * unnecessary resources (such as memory and instances of codecs)
     * being held, failure to call this method immediately if a
     * MediaPlayer object is no longer needed may also lead to
     * continuous battery consumption for mobile devices, and playback
     * failure for other applications if no multiple instances of the
     * same codec are supported on a device. Even if multiple instances
     * of the same codec are supported, some performance degradation
     * may be expected when unnecessary multiple instances are used
     * at the same time.
     */
    public void release() {
        // Drop the wake lock / keep-screen-on state before tearing down.
        stayAwake(false);
        updateSurfaceScreenOn();
        // Null out every listener so no callback fires after release.
        mOnPreparedListener = null;
        mOnBufferingUpdateListener = null;
        mOnCompletionListener = null;
        mOnSeekCompleteListener = null;
        mOnErrorListener = null;
        mOnInfoListener = null;
        mOnVideoSizeChangedListener = null;
        mOnTimedTextListener = null;
        if (mTimeProvider != null) {
            mTimeProvider.close();
            mTimeProvider = null;
        }
        mOnSubtitleDataListener = null;
        // Release the native player last, after Java-side state is cleared.
        _release();
    }

    private native void _release();

    /**
     * Resets the MediaPlayer to its uninitialized state.
After calling 1683 * this method, you will have to initialize it again by setting the 1684 * data source and calling prepare(). 1685 */ 1686 public void reset() { 1687 mSelectedSubtitleTrackIndex = -1; 1688 synchronized(mOpenSubtitleSources) { 1689 for (final InputStream is: mOpenSubtitleSources) { 1690 try { 1691 is.close(); 1692 } catch (IOException e) { 1693 } 1694 } 1695 mOpenSubtitleSources.clear(); 1696 } 1697 if (mSubtitleController != null) { 1698 mSubtitleController.reset(); 1699 } 1700 if (mTimeProvider != null) { 1701 mTimeProvider.close(); 1702 mTimeProvider = null; 1703 } 1704 1705 stayAwake(false); 1706 _reset(); 1707 // make sure none of the listeners get called anymore 1708 if (mEventHandler != null) { 1709 mEventHandler.removeCallbacksAndMessages(null); 1710 } 1711 1712 synchronized (mIndexTrackPairs) { 1713 mIndexTrackPairs.clear(); 1714 mInbandTrackIndices.clear(); 1715 }; 1716 } 1717 1718 private native void _reset(); 1719 1720 /** 1721 * Sets the audio stream type for this MediaPlayer. See {@link AudioManager} 1722 * for a list of stream types. Must call this method before prepare() or 1723 * prepareAsync() in order for the target stream type to become effective 1724 * thereafter. 1725 * 1726 * @param streamtype the audio stream type 1727 * @see android.media.AudioManager 1728 */ 1729 public void setAudioStreamType(int streamtype) { 1730 _setAudioStreamType(streamtype); 1731 mStreamType = streamtype; 1732 } 1733 1734 private native void _setAudioStreamType(int streamtype); 1735 1736 // Keep KEY_PARAMETER_* in sync with include/media/mediaplayer.h 1737 private final static int KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400; 1738 /** 1739 * Sets the parameter indicated by key. 1740 * @param key key indicates the parameter to be set. 1741 * @param value value of the parameter to be set. 
1742 * @return true if the parameter is set successfully, false otherwise 1743 * {@hide} 1744 */ 1745 private native boolean setParameter(int key, Parcel value); 1746 1747 /** 1748 * Sets the audio attributes for this MediaPlayer. 1749 * See {@link AudioAttributes} for how to build and configure an instance of this class. 1750 * You must call this method before {@link #prepare()} or {@link #prepareAsync()} in order 1751 * for the audio attributes to become effective thereafter. 1752 * @param attributes a non-null set of audio attributes 1753 */ 1754 public void setAudioAttributes(AudioAttributes attributes) throws IllegalArgumentException { 1755 if (attributes == null) { 1756 final String msg = "Cannot set AudioAttributes to null"; 1757 throw new IllegalArgumentException(msg); 1758 } 1759 mUsage = attributes.getUsage(); 1760 mBypassInterruptionPolicy = (attributes.getFlags() 1761 & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0; 1762 Parcel pattributes = Parcel.obtain(); 1763 attributes.writeToParcel(pattributes, AudioAttributes.FLATTEN_TAGS); 1764 setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, pattributes); 1765 pattributes.recycle(); 1766 } 1767 1768 /** 1769 * Sets the player to be looping or non-looping. 1770 * 1771 * @param looping whether to loop or not 1772 */ 1773 public native void setLooping(boolean looping); 1774 1775 /** 1776 * Checks whether the MediaPlayer is looping or non-looping. 1777 * 1778 * @return true if the MediaPlayer is currently looping, false otherwise 1779 */ 1780 public native boolean isLooping(); 1781 1782 /** 1783 * Sets the volume on this player. 1784 * This API is recommended for balancing the output of audio streams 1785 * within an application. Unless you are writing an application to 1786 * control user settings, this API should be used in preference to 1787 * {@link AudioManager#setStreamVolume(int, int, int)} which sets the volume of ALL streams of 1788 * a particular type. 
     * Note that the passed volume values are raw scalars in range 0.0 to 1.0.
     * UI controls should be scaled logarithmically.
     *
     * @param leftVolume left volume scalar
     * @param rightVolume right volume scalar
     */
    /*
     * FIXME: Merge this into javadoc comment above when setVolume(float) is not @hide.
     * The single parameter form below is preferred if the channel volumes don't need
     * to be set independently.
     */
    public void setVolume(float leftVolume, float rightVolume) {
        // NOTE(review): isRestricted() presumably reflects an app-ops playback
        // restriction; when it holds, the call is silently a no-op — confirm.
        if (isRestricted()) {
            return;
        }
        _setVolume(leftVolume, rightVolume);
    }

    private native void _setVolume(float leftVolume, float rightVolume);

    /**
     * Similar, excepts sets volume of all channels to same value.
     * @hide
     */
    public void setVolume(float volume) {
        // Delegates to the two-channel form with identical left/right values.
        setVolume(volume, volume);
    }

    /**
     * Sets the audio session ID.
     *
     * @param sessionId the audio session ID.
     * The audio session ID is a system wide unique identifier for the audio stream played by
     * this MediaPlayer instance.
     * The primary use of the audio session ID is to associate audio effects to a particular
     * instance of MediaPlayer: if an audio session ID is provided when creating an audio effect,
     * this effect will be applied only to the audio content of media players within the same
     * audio session and not to the output mix.
     * When created, a MediaPlayer instance automatically generates its own audio session ID.
     * However, it is possible to force this player to be part of an already existing audio session
     * by calling this method.
     * This method must be called before one of the overloaded <code> setDataSource </code> methods.
     * @throws IllegalStateException if it is called in an invalid state
     */
    public native void setAudioSessionId(int sessionId)  throws IllegalArgumentException, IllegalStateException;

    /**
     * Returns the audio session ID.
     *
     * @return the audio session ID. {@see #setAudioSessionId(int)}
     * Note that the audio session ID is 0 only if a problem occured when the MediaPlayer was contructed.
     */
    public native int getAudioSessionId();

    /**
     * Attaches an auxiliary effect to the player. A typical auxiliary effect is a reverberation
     * effect which can be applied on any sound source that directs a certain amount of its
     * energy to this effect. This amount is defined by setAuxEffectSendLevel().
     * See {@link #setAuxEffectSendLevel(float)}.
     * <p>After creating an auxiliary effect (e.g.
     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling this method
     * to attach the player to the effect.
     * <p>To detach the effect from the player, call this method with a null effect id.
     * <p>This method must be called after one of the overloaded <code> setDataSource </code>
     * methods.
     * @param effectId system wide unique id of the effect to attach
     */
    public native void attachAuxEffect(int effectId);


    /**
     * Sets the send level of the player to the attached auxiliary effect.
     * See {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0.
     * <p>By default the send level is 0, so even if an effect is attached to the player
     * this method must be called for the effect to be applied.
     * <p>Note that the passed level value is a raw scalar. UI controls should be scaled
     * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
     * so an appropriate conversion from linear UI input x to level is:
     * x == 0 -> level = 0
     * 0 < x <= R -> level = 10^(72*(x-R)/20/R)
     * @param level send level scalar
     */
    public void setAuxEffectSendLevel(float level) {
        // NOTE(review): isRestricted() presumably reflects an app-ops playback
        // restriction; when it holds, the call is silently a no-op — confirm.
        if (isRestricted()) {
            return;
        }
        _setAuxEffectSendLevel(level);
    }

    private native void _setAuxEffectSendLevel(float level);

    /*
     * @param request Parcel destinated to the media player. The
     *                Interface token must be set to the IMediaPlayer
     *                one to be routed correctly through the system.
     * @param reply[out] Parcel that will contain the reply.
     * @return The status code.
     */
    private native final int native_invoke(Parcel request, Parcel reply);


    /*
     * @param update_only If true fetch only the set of metadata that have
     *                    changed since the last invocation of getMetadata.
     *                    The set is built using the unfiltered
     *                    notifications the native player sent to the
     *                    MediaPlayerService during that period of
     *                    time. If false, all the metadatas are considered.
     * @param apply_filter If true, once the metadata set has been built based on
     *                     the value update_only, the current filter is applied.
     * @param reply[out] On return contains the serialized
     *                   metadata. Valid only if the call was successful.
     * @return The status code.
     */
    private native final boolean native_getMetadata(boolean update_only,
                                                    boolean apply_filter,
                                                    Parcel reply);

    /*
     * @param request Parcel with the 2 serialized lists of allowed
     *                metadata types followed by the one to be
     *                dropped. Each list starts with an integer
     *                indicating the number of metadata type elements.
     * @return The status code.
     */
    private native final int native_setMetadataFilter(Parcel request);

    private static native final void native_init();
    private native final void native_setup(Object mediaplayer_this);
    private native final void native_finalize();

    /**
     * Class for MediaPlayer to return each audio/video/subtitle track's metadata.
     *
     * @see android.media.MediaPlayer#getTrackInfo
     */
    static public class TrackInfo implements Parcelable {
        /**
         * Gets the track type.
         * @return TrackType which indicates if the track is video, audio, timed text.
         */
        public int getTrackType() {
            return mTrackType;
        }

        /**
         * Gets the language code of the track.
         * @return a language code in either way of ISO-639-1 or ISO-639-2.
         * When the language is unknown or could not be determined,
         * ISO-639-2 language code, "und", is returned.
         */
        public String getLanguage() {
            String language = mFormat.getString(MediaFormat.KEY_LANGUAGE);
            return language == null ? "und" : language;
        }

        /**
         * Gets the {@link MediaFormat} of the track.  If the format is
         * unknown or could not be determined, null is returned.
         */
        public MediaFormat getFormat() {
            // Only timed-text and subtitle tracks expose their format here;
            // audio/video tracks deliberately return null.
            if (mTrackType == MEDIA_TRACK_TYPE_TIMEDTEXT
                    || mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
                return mFormat;
            }
            return null;
        }

        public static final int MEDIA_TRACK_TYPE_UNKNOWN = 0;
        public static final int MEDIA_TRACK_TYPE_VIDEO = 1;
        public static final int MEDIA_TRACK_TYPE_AUDIO = 2;
        public static final int MEDIA_TRACK_TYPE_TIMEDTEXT = 3;
        public static final int MEDIA_TRACK_TYPE_SUBTITLE = 4;
        public static final int MEDIA_TRACK_TYPE_METADATA = 5;

        // Track type (one of the MEDIA_TRACK_TYPE_* constants above).
        final int mTrackType;
        // Format carrying at least mime + language; full format only for
        // subtitle/timedtext tracks (see the TODO in the parcel constructor).
        final MediaFormat mFormat;

        TrackInfo(Parcel in) {
            mTrackType = in.readInt();
            // TODO: parcel in the full MediaFormat; currently we are using createSubtitleFormat
            // even for audio/video tracks, meaning we only set the mime and language.
            String mime = in.readString();
            String language = in.readString();
            mFormat = MediaFormat.createSubtitleFormat(mime, language);

            if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
                mFormat.setInteger(MediaFormat.KEY_IS_AUTOSELECT, in.readInt());
                mFormat.setInteger(MediaFormat.KEY_IS_DEFAULT, in.readInt());
                mFormat.setInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE, in.readInt());
            }
        }

        /** @hide */
        TrackInfo(int type, MediaFormat format) {
            mTrackType = type;
            mFormat = format;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public int describeContents() {
            return 0;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void writeToParcel(Parcel dest, int flags) {
            // NOTE(review): this layout (type, language, then mime + flags for
            // subtitle tracks) does not mirror what TrackInfo(Parcel) reads
            // (type, mime, language, then flags). Confirm which layout the
            // native counterpart expects before relying on a Java-side
            // write/read round trip.
            dest.writeInt(mTrackType);
            dest.writeString(getLanguage());

            if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
                dest.writeString(mFormat.getString(MediaFormat.KEY_MIME));
                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_AUTOSELECT));
                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_DEFAULT));
                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE));
            }
        }

        @Override
        public String toString() {
            StringBuilder out = new StringBuilder(128);
            out.append(getClass().getName());
            out.append('{');
            switch (mTrackType) {
            case MEDIA_TRACK_TYPE_VIDEO:
                out.append("VIDEO");
                break;
            case MEDIA_TRACK_TYPE_AUDIO:
                out.append("AUDIO");
                break;
            case MEDIA_TRACK_TYPE_TIMEDTEXT:
                out.append("TIMEDTEXT");
                break;
            case MEDIA_TRACK_TYPE_SUBTITLE:
                out.append("SUBTITLE");
                break;
            default:
                out.append("UNKNOWN");
                break;
            }
            out.append(", " + mFormat.toString());
            out.append("}");
            return out.toString();
        }

        /**
         * Used to read a TrackInfo from a Parcel.
         */
        static final Parcelable.Creator<TrackInfo> CREATOR
                = new Parcelable.Creator<TrackInfo>() {
                    @Override
                    public TrackInfo createFromParcel(Parcel in) {
                        return new TrackInfo(in);
                    }

                    @Override
                    public TrackInfo[] newArray(int size) {
                        return new TrackInfo[size];
                    }
                };

    };

    // We would like domain specific classes with more informative names than the `first` and `second`
    // in generic Pair, but we would also like to avoid creating new/trivial classes. As a compromise
    // we document the meanings of `first` and `second` here:
    //
    // Pair.first - inband track index; non-null iff representing an inband track.
    // Pair.second - a SubtitleTrack registered with mSubtitleController; non-null iff representing
    //               an inband subtitle track or any out-of-band track (subtitle or timedtext).
    private Vector<Pair<Integer, SubtitleTrack>> mIndexTrackPairs = new Vector<>();
    private BitSet mInbandTrackIndices = new BitSet();

    /**
     * Returns an array of track information.
     *
     * @return Array of track info. The total number of tracks is the array length.
     * Must be called again if an external timed text source has been added after any of the
     * addTimedTextSource methods are called.
     * @throws IllegalStateException if it is called in an invalid state.
     */
    public TrackInfo[] getTrackInfo() throws IllegalStateException {
        TrackInfo trackInfo[] = getInbandTrackInfo();
        // add out-of-band tracks
        synchronized (mIndexTrackPairs) {
            TrackInfo allTrackInfo[] = new TrackInfo[mIndexTrackPairs.size()];
            for (int i = 0; i < allTrackInfo.length; i++) {
                Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
                if (p.first != null) {
                    // inband track
                    allTrackInfo[i] = trackInfo[p.first];
                } else {
                    // out-of-band track: synthesize a TrackInfo from the
                    // registered SubtitleTrack.
                    SubtitleTrack track = p.second;
                    allTrackInfo[i] = new TrackInfo(track.getTrackType(), track.getFormat());
                }
            }
            return allTrackInfo;
        }
    }

    // Queries the native player for its in-band tracks via invoke();
    // both parcels are recycled whether or not the call succeeds.
    private TrackInfo[] getInbandTrackInfo() throws IllegalStateException {
        Parcel request = Parcel.obtain();
        Parcel reply = Parcel.obtain();
        try {
            request.writeInterfaceToken(IMEDIA_PLAYER);
            request.writeInt(INVOKE_ID_GET_TRACK_INFO);
            invoke(request, reply);
            TrackInfo trackInfo[] = reply.createTypedArray(TrackInfo.CREATOR);
            return trackInfo;
        } finally {
            request.recycle();
            reply.recycle();
        }
    }

    /* Do not change these values without updating their counterparts
     * in include/media/stagefright/MediaDefs.h and media/libstagefright/MediaDefs.cpp!
     */
    /**
     * MIME type for SubRip (SRT) container. Used in addTimedTextSource APIs.
     */
    public static final String MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";

    /**
     * MIME type for WebVTT subtitle data.
     * @hide
     */
    public static final String MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";

    /**
     * MIME type for CEA-608 closed caption data.
2125 * @hide 2126 */ 2127 public static final String MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608"; 2128 2129 /* 2130 * A helper function to check if the mime type is supported by media framework. 2131 */ 2132 private static boolean availableMimeTypeForExternalSource(String mimeType) { 2133 if (MEDIA_MIMETYPE_TEXT_SUBRIP.equals(mimeType)) { 2134 return true; 2135 } 2136 return false; 2137 } 2138 2139 private SubtitleController mSubtitleController; 2140 2141 /** @hide */ 2142 public void setSubtitleAnchor( 2143 SubtitleController controller, 2144 SubtitleController.Anchor anchor) { 2145 // TODO: create SubtitleController in MediaPlayer 2146 mSubtitleController = controller; 2147 mSubtitleController.setAnchor(anchor); 2148 } 2149 2150 /** 2151 * The private version of setSubtitleAnchor is used internally to set mSubtitleController if 2152 * necessary when clients don't provide their own SubtitleControllers using the public version 2153 * {@link #setSubtitleAnchor(SubtitleController, Anchor)} (e.g. {@link VideoView} provides one). 
     */
    private synchronized void setSubtitleAnchor() {
        if (mSubtitleController == null) {
            // Construct the controller on a short-lived handler thread and
            // block until it is done, so mSubtitleController is set on return.
            final HandlerThread thread = new HandlerThread("SetSubtitleAnchorThread");
            thread.start();
            Handler handler = new Handler(thread.getLooper());
            handler.post(new Runnable() {
                @Override
                public void run() {
                    Context context = ActivityThread.currentApplication();
                    mSubtitleController = new SubtitleController(context, mTimeProvider, MediaPlayer.this);
                    mSubtitleController.setAnchor(new Anchor() {
                        @Override
                        public void setSubtitleWidget(RenderingWidget subtitleWidget) {
                        }

                        @Override
                        public Looper getSubtitleLooper() {
                            return Looper.getMainLooper();
                        }
                    });
                    thread.getLooper().quitSafely();
                }
            });
            try {
                thread.join();
            } catch (InterruptedException e) {
                // Restore the interrupt flag for the caller; the controller
                // may or may not have been created at this point.
                Thread.currentThread().interrupt();
                Log.w(TAG, "failed to join SetSubtitleAnchorThread");
            }
        }
    }

    // Index of the in-band subtitle track currently selected; -1 when none.
    private int mSelectedSubtitleTrackIndex = -1;
    // Streams handed to addSubtitleSource that have not been fully read yet.
    private Vector<InputStream> mOpenSubtitleSources;

    private OnSubtitleDataListener mSubtitleDataListener = new OnSubtitleDataListener() {
        @Override
        public void onSubtitleData(MediaPlayer mp, SubtitleData data) {
            int index = data.getTrackIndex();
            synchronized (mIndexTrackPairs) {
                for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
                    if (p.first != null && p.first == index && p.second != null) {
                        // inband subtitle track that owns data
                        SubtitleTrack track = p.second;
                        track.onData(data);
                    }
                }
            }
        }
    };

    /** @hide */
    @Override
    public void onSubtitleTrackSelected(SubtitleTrack track) {
        // Deselect whatever in-band track was active before switching.
        if (mSelectedSubtitleTrackIndex >= 0) {
            try {
                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, false);
            } catch (IllegalStateException e) {
                // Best-effort deselect; the player may already be torn down.
            }
            mSelectedSubtitleTrackIndex = -1;
        }
        setOnSubtitleDataListener(null);
        if (track == null) {
            return;
        }

        synchronized (mIndexTrackPairs) {
            for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
                if (p.first != null && p.second == track) {
                    // inband subtitle track that is selected
                    mSelectedSubtitleTrackIndex = p.first;
                    break;
                }
            }
        }

        if (mSelectedSubtitleTrackIndex >= 0) {
            try {
                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, true);
            } catch (IllegalStateException e) {
                // Best-effort select; ignore if the engine is in a bad state.
            }
            setOnSubtitleDataListener(mSubtitleDataListener);
        }
        // no need to select out-of-band tracks
    }

    /** @hide */
    public void addSubtitleSource(InputStream is, MediaFormat format)
            throws IllegalStateException
    {
        final InputStream fIs = is;
        final MediaFormat fFormat = format;

        // Ensure all input streams are closed.  It is also a handy
        // way to implement timeouts in the future.
        synchronized(mOpenSubtitleSources) {
            mOpenSubtitleSources.add(is);
        }

        // process each subtitle in its own thread
        final HandlerThread thread = new HandlerThread("SubtitleReadThread",
              Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
        thread.start();
        Handler handler = new Handler(thread.getLooper());
        handler.post(new Runnable() {
            private int addTrack() {
                if (fIs == null || mSubtitleController == null) {
                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
                }

                SubtitleTrack track = mSubtitleController.addTrack(fFormat);
                if (track == null) {
                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
                }

                // TODO: do the conversion in the subtitle track
                // "\\A" delimiter makes next() consume the whole stream.
                Scanner scanner = new Scanner(fIs, "UTF-8");
                String contents = scanner.useDelimiter("\\A").next();
                synchronized(mOpenSubtitleSources) {
                    mOpenSubtitleSources.remove(fIs);
                }
                scanner.close();
                synchronized (mIndexTrackPairs) {
                    mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
                }
                // NOTE(review): contents.getBytes() re-encodes with the platform
                // default charset even though the stream was decoded as UTF-8
                // above — confirm this is intended.
                track.onData(contents.getBytes(), true /* eos */, ~0 /* runID: keep forever */);
                return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
            }

            public void run() {
                int res = addTrack();
                if (mEventHandler != null) {
                    Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
                    mEventHandler.sendMessage(m);
                }
                thread.getLooper().quitSafely();
            }
        });
    }

    private void scanInternalSubtitleTracks() {
        if (mSubtitleController == null) {
            Log.e(TAG, "Should have subtitle controller already set");
            return;
        }

        populateInbandTracks();

        if (mSubtitleController != null) {
            mSubtitleController.selectDefaultTrack();
        }
    }

    // Registers any in-band tracks that appeared since the last scan;
    // mInbandTrackIndices remembers which native indices were already seen.
    private void populateInbandTracks() {
        TrackInfo[] tracks = getInbandTrackInfo();
        synchronized (mIndexTrackPairs) {
            for (int i = 0; i < tracks.length; i++) {
                if (mInbandTrackIndices.get(i)) {
                    continue;
                } else {
                    mInbandTrackIndices.set(i);
                }

                // newly appeared inband track
                if (tracks[i].getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
                    SubtitleTrack track = mSubtitleController.addTrack(
                            tracks[i].getFormat());
                    mIndexTrackPairs.add(Pair.create(i, track));
                } else {
                    mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(i, null));
                }
            }
        }
    }

    /* TODO: Limit the total number of external timed text source to a reasonable number.
     */
    /**
     * Adds an external timed text source file.
     *
     * Currently supported format is SubRip with the file extension .srt, case insensitive.
     * Note that a single external timed text source may contain multiple tracks in it.
     * One can find the total number of available tracks using {@link #getTrackInfo()} to see what
     * additional tracks become available after this method call.
2339 * 2340 * @param path The file path of external timed text source file. 2341 * @param mimeType The mime type of the file. Must be one of the mime types listed above. 2342 * @throws IOException if the file cannot be accessed or is corrupted. 2343 * @throws IllegalArgumentException if the mimeType is not supported. 2344 * @throws IllegalStateException if called in an invalid state. 2345 */ 2346 public void addTimedTextSource(String path, String mimeType) 2347 throws IOException, IllegalArgumentException, IllegalStateException { 2348 if (!availableMimeTypeForExternalSource(mimeType)) { 2349 final String msg = "Illegal mimeType for timed text source: " + mimeType; 2350 throw new IllegalArgumentException(msg); 2351 } 2352 2353 File file = new File(path); 2354 if (file.exists()) { 2355 FileInputStream is = new FileInputStream(file); 2356 FileDescriptor fd = is.getFD(); 2357 addTimedTextSource(fd, mimeType); 2358 is.close(); 2359 } else { 2360 // We do not support the case where the path is not a file. 2361 throw new IOException(path); 2362 } 2363 } 2364 2365 /** 2366 * Adds an external timed text source file (Uri). 2367 * 2368 * Currently supported format is SubRip with the file extension .srt, case insensitive. 2369 * Note that a single external timed text source may contain multiple tracks in it. 2370 * One can find the total number of available tracks using {@link #getTrackInfo()} to see what 2371 * additional tracks become available after this method call. 2372 * 2373 * @param context the Context to use when resolving the Uri 2374 * @param uri the Content URI of the data you want to play 2375 * @param mimeType The mime type of the file. Must be one of the mime types listed above. 2376 * @throws IOException if the file cannot be accessed or is corrupted. 2377 * @throws IllegalArgumentException if the mimeType is not supported. 2378 * @throws IllegalStateException if called in an invalid state. 
     */
    public void addTimedTextSource(Context context, Uri uri, String mimeType)
            throws IOException, IllegalArgumentException, IllegalStateException {
        String scheme = uri.getScheme();
        // file:// (or schemeless) Uris are handled via the path-based overload.
        if(scheme == null || scheme.equals("file")) {
            addTimedTextSource(uri.getPath(), mimeType);
            return;
        }

        AssetFileDescriptor fd = null;
        try {
            ContentResolver resolver = context.getContentResolver();
            fd = resolver.openAssetFileDescriptor(uri, "r");
            if (fd == null) {
                return;
            }
            addTimedTextSource(fd.getFileDescriptor(), mimeType);
            return;
        } catch (SecurityException ex) {
            // Deliberate best-effort: an unreadable content Uri is silently ignored.
        } catch (IOException ex) {
            // Deliberate best-effort: an unopenable content Uri is silently ignored.
        } finally {
            if (fd != null) {
                fd.close();
            }
        }
    }

    /**
     * Adds an external timed text source file (FileDescriptor).
     *
     * It is the caller's responsibility to close the file descriptor.
     * It is safe to do so as soon as this call returns.
     *
     * Currently supported format is SubRip. Note that a single external timed text source may
     * contain multiple tracks in it. One can find the total number of available tracks
     * using {@link #getTrackInfo()} to see what additional tracks become available
     * after this method call.
     *
     * @param fd the FileDescriptor for the file you want to play
     * @param mimeType The mime type of the file. Must be one of the mime types listed above.
     * @throws IllegalArgumentException if the mimeType is not supported.
     * @throws IllegalStateException if called in an invalid state.
     */
    public void addTimedTextSource(FileDescriptor fd, String mimeType)
            throws IllegalArgumentException, IllegalStateException {
        // intentionally less than LONG_MAX
        addTimedTextSource(fd, 0, 0x7ffffffffffffffL, mimeType);
    }

    /**
     * Adds an external timed text file (FileDescriptor).
     *
     * It is the caller's responsibility to close the file descriptor.
2432 * It is safe to do so as soon as this call returns. 2433 * 2434 * Currently supported format is SubRip. Note that a single external timed text source may 2435 * contain multiple tracks in it. One can find the total number of available tracks 2436 * using {@link #getTrackInfo()} to see what additional tracks become available 2437 * after this method call. 2438 * 2439 * @param fd the FileDescriptor for the file you want to play 2440 * @param offset the offset into the file where the data to be played starts, in bytes 2441 * @param length the length in bytes of the data to be played 2442 * @param mime The mime type of the file. Must be one of the mime types listed above. 2443 * @throws IllegalArgumentException if the mimeType is not supported. 2444 * @throws IllegalStateException if called in an invalid state. 2445 */ 2446 public void addTimedTextSource(FileDescriptor fd, long offset, long length, String mime) 2447 throws IllegalArgumentException, IllegalStateException { 2448 if (!availableMimeTypeForExternalSource(mime)) { 2449 throw new IllegalArgumentException("Illegal mimeType for timed text source: " + mime); 2450 } 2451 2452 FileDescriptor fd2; 2453 try { 2454 fd2 = Libcore.os.dup(fd); 2455 } catch (ErrnoException ex) { 2456 Log.e(TAG, ex.getMessage(), ex); 2457 throw new RuntimeException(ex); 2458 } 2459 2460 final MediaFormat fFormat = new MediaFormat(); 2461 fFormat.setString(MediaFormat.KEY_MIME, mime); 2462 fFormat.setInteger(MediaFormat.KEY_IS_TIMED_TEXT, 1); 2463 2464 // A MediaPlayer created by a VideoView should already have its mSubtitleController set. 
2465 if (mSubtitleController == null) { 2466 setSubtitleAnchor(); 2467 } 2468 2469 if (!mSubtitleController.hasRendererFor(fFormat)) { 2470 // test and add not atomic 2471 Context context = ActivityThread.currentApplication(); 2472 mSubtitleController.registerRenderer(new SRTRenderer(context, mEventHandler)); 2473 } 2474 final SubtitleTrack track = mSubtitleController.addTrack(fFormat); 2475 synchronized (mIndexTrackPairs) { 2476 mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track)); 2477 } 2478 2479 final FileDescriptor fd3 = fd2; 2480 final long offset2 = offset; 2481 final long length2 = length; 2482 final HandlerThread thread = new HandlerThread( 2483 "TimedTextReadThread", 2484 Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE); 2485 thread.start(); 2486 Handler handler = new Handler(thread.getLooper()); 2487 handler.post(new Runnable() { 2488 private int addTrack() { 2489 InputStream is = null; 2490 final ByteArrayOutputStream bos = new ByteArrayOutputStream(); 2491 try { 2492 Libcore.os.lseek(fd3, offset2, OsConstants.SEEK_SET); 2493 byte[] buffer = new byte[4096]; 2494 for (long total = 0; total < length2;) { 2495 int bytesToRead = (int) Math.min(buffer.length, length2 - total); 2496 int bytes = IoBridge.read(fd3, buffer, 0, bytesToRead); 2497 if (bytes < 0) { 2498 break; 2499 } else { 2500 bos.write(buffer, 0, bytes); 2501 total += bytes; 2502 } 2503 } 2504 track.onData(bos.toByteArray(), true /* eos */, ~0 /* runID: keep forever */); 2505 return MEDIA_INFO_EXTERNAL_METADATA_UPDATE; 2506 } catch (Exception e) { 2507 Log.e(TAG, e.getMessage(), e); 2508 return MEDIA_INFO_TIMED_TEXT_ERROR; 2509 } finally { 2510 if (is != null) { 2511 try { 2512 is.close(); 2513 } catch (IOException e) { 2514 Log.e(TAG, e.getMessage(), e); 2515 } 2516 } 2517 } 2518 } 2519 2520 public void run() { 2521 int res = addTrack(); 2522 if (mEventHandler != null) { 2523 Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null); 2524 
mEventHandler.sendMessage(m); 2525 } 2526 thread.getLooper().quitSafely(); 2527 } 2528 }); 2529 } 2530 2531 /** 2532 * Returns the index of the audio, video, or subtitle track currently selected for playback, 2533 * The return value is an index into the array returned by {@link #getTrackInfo()}, and can 2534 * be used in calls to {@link #selectTrack(int)} or {@link #deselectTrack(int)}. 2535 * 2536 * @param trackType should be one of {@link TrackInfo#MEDIA_TRACK_TYPE_VIDEO}, 2537 * {@link TrackInfo#MEDIA_TRACK_TYPE_AUDIO}, or 2538 * {@link TrackInfo#MEDIA_TRACK_TYPE_SUBTITLE} 2539 * @return index of the audio, video, or subtitle track currently selected for playback; 2540 * a negative integer is returned when there is no selected track for {@code trackType} or 2541 * when {@code trackType} is not one of audio, video, or subtitle. 2542 * @throws IllegalStateException if called after {@link #release()} 2543 * 2544 * @see #getTrackInfo() 2545 * @see #selectTrack(int) 2546 * @see #deselectTrack(int) 2547 */ 2548 public int getSelectedTrack(int trackType) throws IllegalStateException { 2549 if (mSubtitleController != null 2550 && (trackType == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE 2551 || trackType == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT)) { 2552 SubtitleTrack subtitleTrack = mSubtitleController.getSelectedTrack(); 2553 if (subtitleTrack != null) { 2554 synchronized (mIndexTrackPairs) { 2555 for (int i = 0; i < mIndexTrackPairs.size(); i++) { 2556 Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i); 2557 if (p.second == subtitleTrack && subtitleTrack.getTrackType() == trackType) { 2558 return i; 2559 } 2560 } 2561 } 2562 } 2563 } 2564 2565 Parcel request = Parcel.obtain(); 2566 Parcel reply = Parcel.obtain(); 2567 try { 2568 request.writeInterfaceToken(IMEDIA_PLAYER); 2569 request.writeInt(INVOKE_ID_GET_SELECTED_TRACK); 2570 request.writeInt(trackType); 2571 invoke(request, reply); 2572 int inbandTrackIndex = reply.readInt(); 2573 synchronized (mIndexTrackPairs) { 
2574 for (int i = 0; i < mIndexTrackPairs.size(); i++) { 2575 Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i); 2576 if (p.first != null && p.first == inbandTrackIndex) { 2577 return i; 2578 } 2579 } 2580 } 2581 return -1; 2582 } finally { 2583 request.recycle(); 2584 reply.recycle(); 2585 } 2586 } 2587 2588 /** 2589 * Selects a track. 2590 * <p> 2591 * If a MediaPlayer is in invalid state, it throws an IllegalStateException exception. 2592 * If a MediaPlayer is in <em>Started</em> state, the selected track is presented immediately. 2593 * If a MediaPlayer is not in Started state, it just marks the track to be played. 2594 * </p> 2595 * <p> 2596 * In any valid state, if it is called multiple times on the same type of track (ie. Video, 2597 * Audio, Timed Text), the most recent one will be chosen. 2598 * </p> 2599 * <p> 2600 * The first audio and video tracks are selected by default if available, even though 2601 * this method is not called. However, no timed text track will be selected until 2602 * this function is called. 2603 * </p> 2604 * <p> 2605 * Currently, only timed text tracks or audio tracks can be selected via this method. 2606 * In addition, the support for selecting an audio track at runtime is pretty limited 2607 * in that an audio track can only be selected in the <em>Prepared</em> state. 2608 * </p> 2609 * @param index the index of the track to be selected. The valid range of the index 2610 * is 0..total number of track - 1. The total number of tracks as well as the type of 2611 * each individual track can be found by calling {@link #getTrackInfo()} method. 2612 * @throws IllegalStateException if called in an invalid state. 2613 * 2614 * @see android.media.MediaPlayer#getTrackInfo 2615 */ 2616 public void selectTrack(int index) throws IllegalStateException { 2617 selectOrDeselectTrack(index, true /* select */); 2618 } 2619 2620 /** 2621 * Deselect a track. 
2622 * <p> 2623 * Currently, the track must be a timed text track and no audio or video tracks can be 2624 * deselected. If the timed text track identified by index has not been 2625 * selected before, it throws an exception. 2626 * </p> 2627 * @param index the index of the track to be deselected. The valid range of the index 2628 * is 0..total number of tracks - 1. The total number of tracks as well as the type of 2629 * each individual track can be found by calling {@link #getTrackInfo()} method. 2630 * @throws IllegalStateException if called in an invalid state. 2631 * 2632 * @see android.media.MediaPlayer#getTrackInfo 2633 */ 2634 public void deselectTrack(int index) throws IllegalStateException { 2635 selectOrDeselectTrack(index, false /* select */); 2636 } 2637 2638 private void selectOrDeselectTrack(int index, boolean select) 2639 throws IllegalStateException { 2640 // handle subtitle track through subtitle controller 2641 populateInbandTracks(); 2642 2643 Pair<Integer,SubtitleTrack> p = null; 2644 try { 2645 p = mIndexTrackPairs.get(index); 2646 } catch (ArrayIndexOutOfBoundsException e) { 2647 // ignore bad index 2648 return; 2649 } 2650 2651 SubtitleTrack track = p.second; 2652 if (track == null) { 2653 // inband (de)select 2654 selectOrDeselectInbandTrack(p.first, select); 2655 return; 2656 } 2657 2658 if (mSubtitleController == null) { 2659 return; 2660 } 2661 2662 if (!select) { 2663 // out-of-band deselect 2664 if (mSubtitleController.getSelectedTrack() == track) { 2665 mSubtitleController.selectTrack(null); 2666 } else { 2667 Log.w(TAG, "trying to deselect track that was not selected"); 2668 } 2669 return; 2670 } 2671 2672 // out-of-band select 2673 if (track.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT) { 2674 int ttIndex = getSelectedTrack(TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT); 2675 synchronized (mIndexTrackPairs) { 2676 if (ttIndex >= 0 && ttIndex < mIndexTrackPairs.size()) { 2677 Pair<Integer,SubtitleTrack> p2 = 
mIndexTrackPairs.get(ttIndex); 2678 if (p2.first != null && p2.second == null) { 2679 // deselect inband counterpart 2680 selectOrDeselectInbandTrack(p2.first, false); 2681 } 2682 } 2683 } 2684 } 2685 mSubtitleController.selectTrack(track); 2686 } 2687 2688 private void selectOrDeselectInbandTrack(int index, boolean select) 2689 throws IllegalStateException { 2690 Parcel request = Parcel.obtain(); 2691 Parcel reply = Parcel.obtain(); 2692 try { 2693 request.writeInterfaceToken(IMEDIA_PLAYER); 2694 request.writeInt(select? INVOKE_ID_SELECT_TRACK: INVOKE_ID_DESELECT_TRACK); 2695 request.writeInt(index); 2696 invoke(request, reply); 2697 } finally { 2698 request.recycle(); 2699 reply.recycle(); 2700 } 2701 } 2702 2703 2704 /** 2705 * @param reply Parcel with audio/video duration info for battery 2706 tracking usage 2707 * @return The status code. 2708 * {@hide} 2709 */ 2710 public native static int native_pullBatteryData(Parcel reply); 2711 2712 /** 2713 * Sets the target UDP re-transmit endpoint for the low level player. 2714 * Generally, the address portion of the endpoint is an IP multicast 2715 * address, although a unicast address would be equally valid. When a valid 2716 * retransmit endpoint has been set, the media player will not decode and 2717 * render the media presentation locally. Instead, the player will attempt 2718 * to re-multiplex its media data using the Android@Home RTP profile and 2719 * re-transmit to the target endpoint. Receiver devices (which may be 2720 * either the same as the transmitting device or different devices) may 2721 * instantiate, prepare, and start a receiver player using a setDataSource 2722 * URL of the form... 2723 * 2724 * aahRX://<multicastIP>:<port> 2725 * 2726 * to receive, decode and render the re-transmitted content. 2727 * 2728 * setRetransmitEndpoint may only be called before setDataSource has been 2729 * called; while the player is in the Idle state. 
2730 * 2731 * @param endpoint the address and UDP port of the re-transmission target or 2732 * null if no re-transmission is to be performed. 2733 * @throws IllegalStateException if it is called in an invalid state 2734 * @throws IllegalArgumentException if the retransmit endpoint is supplied, 2735 * but invalid. 2736 * 2737 * {@hide} pending API council 2738 */ 2739 public void setRetransmitEndpoint(InetSocketAddress endpoint) 2740 throws IllegalStateException, IllegalArgumentException 2741 { 2742 String addrString = null; 2743 int port = 0; 2744 2745 if (null != endpoint) { 2746 addrString = endpoint.getAddress().getHostAddress(); 2747 port = endpoint.getPort(); 2748 } 2749 2750 int ret = native_setRetransmitEndpoint(addrString, port); 2751 if (ret != 0) { 2752 throw new IllegalArgumentException("Illegal re-transmit endpoint; native ret " + ret); 2753 } 2754 } 2755 2756 private native final int native_setRetransmitEndpoint(String addrString, int port); 2757 2758 @Override 2759 protected void finalize() { native_finalize(); } 2760 2761 /* Do not change these values without updating their counterparts 2762 * in include/media/mediaplayer.h! 
     */
    private static final int MEDIA_NOP = 0; // interface test message
    private static final int MEDIA_PREPARED = 1;
    private static final int MEDIA_PLAYBACK_COMPLETE = 2;
    private static final int MEDIA_BUFFERING_UPDATE = 3;
    private static final int MEDIA_SEEK_COMPLETE = 4;
    private static final int MEDIA_SET_VIDEO_SIZE = 5;
    private static final int MEDIA_STARTED = 6;
    private static final int MEDIA_PAUSED = 7;
    private static final int MEDIA_STOPPED = 8;
    private static final int MEDIA_SKIPPED = 9;
    private static final int MEDIA_TIMED_TEXT = 99;
    private static final int MEDIA_ERROR = 100;
    private static final int MEDIA_INFO = 200;
    private static final int MEDIA_SUBTITLE_DATA = 201;
    private static final int MEDIA_META_DATA = 202;

    // Lazily created by getMediaTimeProvider().
    // NOTE(review): creation is not synchronized — concurrent first calls could
    // create two instances; confirm callers are confined to one thread.
    private TimeProvider mTimeProvider;

    /** @hide */
    public MediaTimeProvider getMediaTimeProvider() {
        if (mTimeProvider == null) {
            mTimeProvider = new TimeProvider(this);
        }
        return mTimeProvider;
    }

    /**
     * Dispatches events posted from the native layer (see postEventFromNative)
     * to the registered application listeners on this handler's looper.
     */
    private class EventHandler extends Handler
    {
        private MediaPlayer mMediaPlayer;

        public EventHandler(MediaPlayer mp, Looper looper) {
            super(looper);
            mMediaPlayer = mp;
        }

        @Override
        public void handleMessage(Message msg) {
            // A zero native context means the native player has been released;
            // drop late events rather than dispatching on a dead player.
            if (mMediaPlayer.mNativeContext == 0) {
                Log.w(TAG, "mediaplayer went away with unhandled events");
                return;
            }
            switch(msg.what) {
            case MEDIA_PREPARED:
                scanInternalSubtitleTracks();
                if (mOnPreparedListener != null)
                    mOnPreparedListener.onPrepared(mMediaPlayer);
                return;

            case MEDIA_PLAYBACK_COMPLETE:
                if (mOnCompletionListener != null)
                    mOnCompletionListener.onCompletion(mMediaPlayer);
                // Playback is over; release the wakelock if one is held.
                stayAwake(false);
                return;

            case MEDIA_STOPPED:
                {
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onStopped();
                    }
                }
                break;

            case MEDIA_STARTED:
            case MEDIA_PAUSED:
                {
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onPaused(msg.what == MEDIA_PAUSED);
                    }
                }
                break;

            case MEDIA_BUFFERING_UPDATE:
                if (mOnBufferingUpdateListener != null)
                    mOnBufferingUpdateListener.onBufferingUpdate(mMediaPlayer, msg.arg1);
                return;

            case MEDIA_SEEK_COMPLETE:
                if (mOnSeekCompleteListener != null) {
                    mOnSeekCompleteListener.onSeekComplete(mMediaPlayer);
                }
                // fall through
                // (intentional: a seek-complete also notifies the time provider
                // exactly like MEDIA_SKIPPED below)

            case MEDIA_SKIPPED:
                {
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onSeekComplete(mMediaPlayer);
                    }
                }
                return;

            case MEDIA_SET_VIDEO_SIZE:
                if (mOnVideoSizeChangedListener != null) {
                    mOnVideoSizeChangedListener.onVideoSizeChanged(
                            mMediaPlayer, msg.arg1, msg.arg2);
                }
                return;

            case MEDIA_ERROR:
                Log.e(TAG, "Error (" + msg.arg1 + "," + msg.arg2 + ")");
                boolean error_was_handled = false;
                if (mOnErrorListener != null) {
                    error_was_handled = mOnErrorListener.onError(mMediaPlayer, msg.arg1, msg.arg2);
                }
                // An unhandled error ends playback, so completion is reported too.
                if (mOnCompletionListener != null && ! error_was_handled) {
                    mOnCompletionListener.onCompletion(mMediaPlayer);
                }
                stayAwake(false);
                return;

            case MEDIA_INFO:
                switch (msg.arg1) {
                case MEDIA_INFO_VIDEO_TRACK_LAGGING:
                    Log.i(TAG, "Info (" + msg.arg1 + "," + msg.arg2 + ")");
                    break;
                case MEDIA_INFO_METADATA_UPDATE:
                    scanInternalSubtitleTracks();
                    // fall through

                case MEDIA_INFO_EXTERNAL_METADATA_UPDATE:
                    // The EXTERNAL constant is @hide; apps see it rewritten as
                    // a plain MEDIA_INFO_METADATA_UPDATE.
                    msg.arg1 = MEDIA_INFO_METADATA_UPDATE;
                    // update default track selection
                    if (mSubtitleController != null) {
                        mSubtitleController.selectDefaultTrack();
                    }
                    break;
                case MEDIA_INFO_BUFFERING_START:
                case MEDIA_INFO_BUFFERING_END:
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onBuffering(msg.arg1 == MEDIA_INFO_BUFFERING_START);
                    }
                    break;
                }

                if (mOnInfoListener != null) {
                    mOnInfoListener.onInfo(mMediaPlayer, msg.arg1, msg.arg2);
                }
                // No real default action so far.
                return;
            case MEDIA_TIMED_TEXT:
                if (mOnTimedTextListener == null)
                    return;
                if (msg.obj == null) {
                    // Null payload clears the currently shown text.
                    mOnTimedTextListener.onTimedText(mMediaPlayer, null);
                } else {
                    if (msg.obj instanceof Parcel) {
                        Parcel parcel = (Parcel)msg.obj;
                        TimedText text = new TimedText(parcel);
                        parcel.recycle();
                        mOnTimedTextListener.onTimedText(mMediaPlayer, text);
                    }
                }
                return;

            case MEDIA_SUBTITLE_DATA:
                if (mOnSubtitleDataListener == null) {
                    return;
                }
                if (msg.obj instanceof Parcel) {
                    Parcel parcel = (Parcel) msg.obj;
                    SubtitleData data = new SubtitleData(parcel);
                    parcel.recycle();
                    mOnSubtitleDataListener.onSubtitleData(mMediaPlayer, data);
                }
                return;

            case MEDIA_META_DATA:
                if (mOnTimedMetaDataAvailableListener == null) {
                    return;
                }
                if (msg.obj instanceof Parcel) {
                    Parcel parcel = (Parcel) msg.obj;
                    TimedMetaData data = TimedMetaData.createTimedMetaDataFromParcel(parcel);
                    parcel.recycle();
                    mOnTimedMetaDataAvailableListener.onTimedMetaDataAvailable(mMediaPlayer, data);
                }
                return;

            case MEDIA_NOP: // interface test message - ignore
                break;

            default:
                Log.e(TAG, "Unknown message type " + msg.what);
                return;
            }
        }
    }

    /*
     * Called from native code when an interesting event happens.  This method
     * just uses the EventHandler system to post the event back to the main app thread.
     * We use a weak reference to the original MediaPlayer object so that the native
     * code is safe from the object disappearing from underneath it.  (This is
     * the cookie passed to native_setup().)
2961 */ 2962 private static void postEventFromNative(Object mediaplayer_ref, 2963 int what, int arg1, int arg2, Object obj) 2964 { 2965 MediaPlayer mp = (MediaPlayer)((WeakReference)mediaplayer_ref).get(); 2966 if (mp == null) { 2967 return; 2968 } 2969 2970 if (what == MEDIA_INFO && arg1 == MEDIA_INFO_STARTED_AS_NEXT) { 2971 // this acquires the wakelock if needed, and sets the client side state 2972 mp.start(); 2973 } 2974 if (mp.mEventHandler != null) { 2975 Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj); 2976 mp.mEventHandler.sendMessage(m); 2977 } 2978 } 2979 2980 /** 2981 * Interface definition for a callback to be invoked when the media 2982 * source is ready for playback. 2983 */ 2984 public interface OnPreparedListener 2985 { 2986 /** 2987 * Called when the media file is ready for playback. 2988 * 2989 * @param mp the MediaPlayer that is ready for playback 2990 */ 2991 void onPrepared(MediaPlayer mp); 2992 } 2993 2994 /** 2995 * Register a callback to be invoked when the media source is ready 2996 * for playback. 2997 * 2998 * @param listener the callback that will be run 2999 */ 3000 public void setOnPreparedListener(OnPreparedListener listener) 3001 { 3002 mOnPreparedListener = listener; 3003 } 3004 3005 private OnPreparedListener mOnPreparedListener; 3006 3007 /** 3008 * Interface definition for a callback to be invoked when playback of 3009 * a media source has completed. 3010 */ 3011 public interface OnCompletionListener 3012 { 3013 /** 3014 * Called when the end of a media source is reached during playback. 3015 * 3016 * @param mp the MediaPlayer that reached the end of the file 3017 */ 3018 void onCompletion(MediaPlayer mp); 3019 } 3020 3021 /** 3022 * Register a callback to be invoked when the end of a media source 3023 * has been reached during playback. 
3024 * 3025 * @param listener the callback that will be run 3026 */ 3027 public void setOnCompletionListener(OnCompletionListener listener) 3028 { 3029 mOnCompletionListener = listener; 3030 } 3031 3032 private OnCompletionListener mOnCompletionListener; 3033 3034 /** 3035 * Interface definition of a callback to be invoked indicating buffering 3036 * status of a media resource being streamed over the network. 3037 */ 3038 public interface OnBufferingUpdateListener 3039 { 3040 /** 3041 * Called to update status in buffering a media stream received through 3042 * progressive HTTP download. The received buffering percentage 3043 * indicates how much of the content has been buffered or played. 3044 * For example a buffering update of 80 percent when half the content 3045 * has already been played indicates that the next 30 percent of the 3046 * content to play has been buffered. 3047 * 3048 * @param mp the MediaPlayer the update pertains to 3049 * @param percent the percentage (0-100) of the content 3050 * that has been buffered or played thus far 3051 */ 3052 void onBufferingUpdate(MediaPlayer mp, int percent); 3053 } 3054 3055 /** 3056 * Register a callback to be invoked when the status of a network 3057 * stream's buffer has changed. 3058 * 3059 * @param listener the callback that will be run. 3060 */ 3061 public void setOnBufferingUpdateListener(OnBufferingUpdateListener listener) 3062 { 3063 mOnBufferingUpdateListener = listener; 3064 } 3065 3066 private OnBufferingUpdateListener mOnBufferingUpdateListener; 3067 3068 /** 3069 * Interface definition of a callback to be invoked indicating 3070 * the completion of a seek operation. 3071 */ 3072 public interface OnSeekCompleteListener 3073 { 3074 /** 3075 * Called to indicate the completion of a seek operation. 
3076 * 3077 * @param mp the MediaPlayer that issued the seek operation 3078 */ 3079 public void onSeekComplete(MediaPlayer mp); 3080 } 3081 3082 /** 3083 * Register a callback to be invoked when a seek operation has been 3084 * completed. 3085 * 3086 * @param listener the callback that will be run 3087 */ 3088 public void setOnSeekCompleteListener(OnSeekCompleteListener listener) 3089 { 3090 mOnSeekCompleteListener = listener; 3091 } 3092 3093 private OnSeekCompleteListener mOnSeekCompleteListener; 3094 3095 /** 3096 * Interface definition of a callback to be invoked when the 3097 * video size is first known or updated 3098 */ 3099 public interface OnVideoSizeChangedListener 3100 { 3101 /** 3102 * Called to indicate the video size 3103 * 3104 * The video size (width and height) could be 0 if there was no video, 3105 * no display surface was set, or the value was not determined yet. 3106 * 3107 * @param mp the MediaPlayer associated with this callback 3108 * @param width the width of the video 3109 * @param height the height of the video 3110 */ 3111 public void onVideoSizeChanged(MediaPlayer mp, int width, int height); 3112 } 3113 3114 /** 3115 * Register a callback to be invoked when the video size is 3116 * known or updated. 3117 * 3118 * @param listener the callback that will be run 3119 */ 3120 public void setOnVideoSizeChangedListener(OnVideoSizeChangedListener listener) 3121 { 3122 mOnVideoSizeChangedListener = listener; 3123 } 3124 3125 private OnVideoSizeChangedListener mOnVideoSizeChangedListener; 3126 3127 /** 3128 * Interface definition of a callback to be invoked when a 3129 * timed text is available for display. 3130 */ 3131 public interface OnTimedTextListener 3132 { 3133 /** 3134 * Called to indicate an avaliable timed text 3135 * 3136 * @param mp the MediaPlayer associated with this callback 3137 * @param text the timed text sample which contains the text 3138 * needed to be displayed and the display format. 
3139 */ 3140 public void onTimedText(MediaPlayer mp, TimedText text); 3141 } 3142 3143 /** 3144 * Register a callback to be invoked when a timed text is available 3145 * for display. 3146 * 3147 * @param listener the callback that will be run 3148 */ 3149 public void setOnTimedTextListener(OnTimedTextListener listener) 3150 { 3151 mOnTimedTextListener = listener; 3152 } 3153 3154 private OnTimedTextListener mOnTimedTextListener; 3155 3156 /** 3157 * Interface definition of a callback to be invoked when a 3158 * track has data available. 3159 * 3160 * @hide 3161 */ 3162 public interface OnSubtitleDataListener 3163 { 3164 public void onSubtitleData(MediaPlayer mp, SubtitleData data); 3165 } 3166 3167 /** 3168 * Register a callback to be invoked when a track has data available. 3169 * 3170 * @param listener the callback that will be run 3171 * 3172 * @hide 3173 */ 3174 public void setOnSubtitleDataListener(OnSubtitleDataListener listener) 3175 { 3176 mOnSubtitleDataListener = listener; 3177 } 3178 3179 private OnSubtitleDataListener mOnSubtitleDataListener; 3180 3181 /** 3182 * Interface definition of a callback to be invoked when a 3183 * track has timed metadata available. 3184 * 3185 * @see MediaPlayer#setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener) 3186 */ 3187 public interface OnTimedMetaDataAvailableListener 3188 { 3189 /** 3190 * Called to indicate avaliable timed metadata 3191 * <p> 3192 * This method will be called as timed metadata is extracted from the media, 3193 * in the same order as it occurs in the media. The timing of this event is 3194 * not controlled by the associated timestamp. 3195 * 3196 * @param mp the MediaPlayer associated with this callback 3197 * @param data the timed metadata sample associated with this event 3198 */ 3199 public void onTimedMetaDataAvailable(MediaPlayer mp, TimedMetaData data); 3200 } 3201 3202 /** 3203 * Register a callback to be invoked when a selected track has timed metadata available. 
3204 * <p> 3205 * Currently only HTTP live streaming data URI's embedded with timed ID3 tags generates 3206 * {@link TimedMetaData}. 3207 * 3208 * @see MediaPlayer#selectTrack(int) 3209 * @see MediaPlayer.OnTimedMetaDataAvailableListener 3210 * @see TimedMetaData 3211 * 3212 * @param listener the callback that will be run 3213 */ 3214 public void setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener listener) 3215 { 3216 mOnTimedMetaDataAvailableListener = listener; 3217 } 3218 3219 private OnTimedMetaDataAvailableListener mOnTimedMetaDataAvailableListener; 3220 3221 /* Do not change these values without updating their counterparts 3222 * in include/media/mediaplayer.h! 3223 */ 3224 /** Unspecified media player error. 3225 * @see android.media.MediaPlayer.OnErrorListener 3226 */ 3227 public static final int MEDIA_ERROR_UNKNOWN = 1; 3228 3229 /** Media server died. In this case, the application must release the 3230 * MediaPlayer object and instantiate a new one. 3231 * @see android.media.MediaPlayer.OnErrorListener 3232 */ 3233 public static final int MEDIA_ERROR_SERVER_DIED = 100; 3234 3235 /** The video is streamed and its container is not valid for progressive 3236 * playback i.e the video's index (e.g moov atom) is not at the start of the 3237 * file. 3238 * @see android.media.MediaPlayer.OnErrorListener 3239 */ 3240 public static final int MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200; 3241 3242 /** File or network related operation errors. */ 3243 public static final int MEDIA_ERROR_IO = -1004; 3244 /** Bitstream is not conforming to the related coding standard or file spec. */ 3245 public static final int MEDIA_ERROR_MALFORMED = -1007; 3246 /** Bitstream is conforming to the related coding standard or file spec, but 3247 * the media framework does not support the feature. */ 3248 public static final int MEDIA_ERROR_UNSUPPORTED = -1010; 3249 /** Some operation takes too long to complete, usually more than 3-5 seconds. 
     */
    public static final int MEDIA_ERROR_TIMED_OUT = -110;

    /**
     * Interface definition of a callback to be invoked when there
     * has been an error during an asynchronous operation (other errors
     * will throw exceptions at method call time).
     */
    public interface OnErrorListener
    {
        /**
         * Called to indicate an error.
         *
         * @param mp the MediaPlayer the error pertains to
         * @param what the type of error that has occurred:
         * <ul>
         * <li>{@link #MEDIA_ERROR_UNKNOWN}
         * <li>{@link #MEDIA_ERROR_SERVER_DIED}
         * </ul>
         * @param extra an extra code, specific to the error. Typically
         * implementation dependent.
         * <ul>
         * <li>{@link #MEDIA_ERROR_IO}
         * <li>{@link #MEDIA_ERROR_MALFORMED}
         * <li>{@link #MEDIA_ERROR_UNSUPPORTED}
         * <li>{@link #MEDIA_ERROR_TIMED_OUT}
         * </ul>
         * @return True if the method handled the error, false if it didn't.
         * Returning false, or not having an OnErrorListener at all, will
         * cause the OnCompletionListener to be called.
         */
        boolean onError(MediaPlayer mp, int what, int extra);
    }

    /**
     * Register a callback to be invoked when an error has happened
     * during an asynchronous operation.
     *
     * @param listener the callback that will be run
     */
    public void setOnErrorListener(OnErrorListener listener)
    {
        mOnErrorListener = listener;
    }

    private OnErrorListener mOnErrorListener;


    /* Do not change these values without updating their counterparts
     * in include/media/mediaplayer.h!
     */
    /** Unspecified media player info.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_UNKNOWN = 1;

    /** The player was started because it was used as the next player for another
     * player, which just completed playback.
     * @see android.media.MediaPlayer.OnInfoListener
     * @hide
     */
    public static final int MEDIA_INFO_STARTED_AS_NEXT = 2;

    /** The player just pushed the very first video frame for rendering.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_VIDEO_RENDERING_START = 3;

    /** The video is too complex for the decoder: it can't decode frames fast
     * enough. Possibly only the audio plays fine at this stage.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_VIDEO_TRACK_LAGGING = 700;

    /** MediaPlayer is temporarily pausing playback internally in order to
     * buffer more data.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_BUFFERING_START = 701;

    /** MediaPlayer is resuming playback after filling buffers.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_BUFFERING_END = 702;

    /** Bad interleaving means that a media has been improperly interleaved or
     * not interleaved at all, e.g has all the video samples first then all the
     * audio ones. Video is playing but a lot of disk seeks may be happening.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_BAD_INTERLEAVING = 800;

    /** The media cannot be seeked (e.g live stream)
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_NOT_SEEKABLE = 801;

    /** A new set of metadata is available.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_METADATA_UPDATE = 802;

    /** A new set of external-only metadata is available. Used by
     * JAVA framework to avoid triggering track scanning.
     * @hide
     */
    public static final int MEDIA_INFO_EXTERNAL_METADATA_UPDATE = 803;

    /** Failed to handle timed text track properly.
     * @see android.media.MediaPlayer.OnInfoListener
     *
     * {@hide}
     */
    public static final int MEDIA_INFO_TIMED_TEXT_ERROR = 900;

    /** Subtitle track was not supported by the media framework.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_UNSUPPORTED_SUBTITLE = 901;

    /** Reading the subtitle track takes too long.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_SUBTITLE_TIMED_OUT = 902;

    /**
     * Interface definition of a callback to be invoked to communicate some
     * info and/or warning about the media or its playback.
     */
    public interface OnInfoListener
    {
        /**
         * Called to indicate an info or a warning.
         *
         * @param mp the MediaPlayer the info pertains to.
         * @param what the type of info or warning.
         * <ul>
         * <li>{@link #MEDIA_INFO_UNKNOWN}
         * <li>{@link #MEDIA_INFO_VIDEO_TRACK_LAGGING}
         * <li>{@link #MEDIA_INFO_VIDEO_RENDERING_START}
         * <li>{@link #MEDIA_INFO_BUFFERING_START}
         * <li>{@link #MEDIA_INFO_BUFFERING_END}
         * <li>{@link #MEDIA_INFO_BAD_INTERLEAVING}
         * <li>{@link #MEDIA_INFO_NOT_SEEKABLE}
         * <li>{@link #MEDIA_INFO_METADATA_UPDATE}
         * <li>{@link #MEDIA_INFO_UNSUPPORTED_SUBTITLE}
         * <li>{@link #MEDIA_INFO_SUBTITLE_TIMED_OUT}
         * </ul>
         * @param extra an extra code, specific to the info. Typically
         * implementation dependent.
         * @return True if the method handled the info, false if it didn't.
         * Returning false, or not having an OnInfoListener at all, will
         * cause the info to be discarded.
         */
        boolean onInfo(MediaPlayer mp, int what, int extra);
    }

    /**
     * Register a callback to be invoked when an info/warning is available.
3408 * 3409 * @param listener the callback that will be run 3410 */ 3411 public void setOnInfoListener(OnInfoListener listener) 3412 { 3413 mOnInfoListener = listener; 3414 } 3415 3416 private OnInfoListener mOnInfoListener; 3417 3418 /* 3419 * Test whether a given video scaling mode is supported. 3420 */ 3421 private boolean isVideoScalingModeSupported(int mode) { 3422 return (mode == VIDEO_SCALING_MODE_SCALE_TO_FIT || 3423 mode == VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING); 3424 } 3425 3426 /** @hide */ 3427 static class TimeProvider implements MediaPlayer.OnSeekCompleteListener, 3428 MediaTimeProvider { 3429 private static final String TAG = "MTP"; 3430 private static final long MAX_NS_WITHOUT_POSITION_CHECK = 5000000000L; 3431 private static final long MAX_EARLY_CALLBACK_US = 1000; 3432 private static final long TIME_ADJUSTMENT_RATE = 2; /* meaning 1/2 */ 3433 private long mLastTimeUs = 0; 3434 private MediaPlayer mPlayer; 3435 private boolean mPaused = true; 3436 private boolean mStopped = true; 3437 private boolean mBuffering; 3438 private long mLastReportedTime; 3439 private long mTimeAdjustment; 3440 // since we are expecting only a handful listeners per stream, there is 3441 // no need for log(N) search performance 3442 private MediaTimeProvider.OnMediaTimeListener mListeners[]; 3443 private long mTimes[]; 3444 private long mLastNanoTime; 3445 private Handler mEventHandler; 3446 private boolean mRefresh = false; 3447 private boolean mPausing = false; 3448 private boolean mSeeking = false; 3449 private static final int NOTIFY = 1; 3450 private static final int NOTIFY_TIME = 0; 3451 private static final int REFRESH_AND_NOTIFY_TIME = 1; 3452 private static final int NOTIFY_STOP = 2; 3453 private static final int NOTIFY_SEEK = 3; 3454 private HandlerThread mHandlerThread; 3455 3456 /** @hide */ 3457 public boolean DEBUG = false; 3458 3459 public TimeProvider(MediaPlayer mp) { 3460 mPlayer = mp; 3461 try { 3462 getCurrentTimeUs(true, false); 3463 } catch 
(IllegalStateException e) { 3464 // we assume starting position 3465 mRefresh = true; 3466 } 3467 3468 Looper looper; 3469 if ((looper = Looper.myLooper()) == null && 3470 (looper = Looper.getMainLooper()) == null) { 3471 // Create our own looper here in case MP was created without one 3472 mHandlerThread = new HandlerThread("MediaPlayerMTPEventThread", 3473 Process.THREAD_PRIORITY_FOREGROUND); 3474 mHandlerThread.start(); 3475 looper = mHandlerThread.getLooper(); 3476 } 3477 mEventHandler = new EventHandler(looper); 3478 3479 mListeners = new MediaTimeProvider.OnMediaTimeListener[0]; 3480 mTimes = new long[0]; 3481 mLastTimeUs = 0; 3482 mTimeAdjustment = 0; 3483 } 3484 3485 private void scheduleNotification(int type, long delayUs) { 3486 // ignore time notifications until seek is handled 3487 if (mSeeking && 3488 (type == NOTIFY_TIME || type == REFRESH_AND_NOTIFY_TIME)) { 3489 return; 3490 } 3491 3492 if (DEBUG) Log.v(TAG, "scheduleNotification " + type + " in " + delayUs); 3493 mEventHandler.removeMessages(NOTIFY); 3494 Message msg = mEventHandler.obtainMessage(NOTIFY, type, 0); 3495 mEventHandler.sendMessageDelayed(msg, (int) (delayUs / 1000)); 3496 } 3497 3498 /** @hide */ 3499 public void close() { 3500 mEventHandler.removeMessages(NOTIFY); 3501 if (mHandlerThread != null) { 3502 mHandlerThread.quitSafely(); 3503 mHandlerThread = null; 3504 } 3505 } 3506 3507 /** @hide */ 3508 protected void finalize() { 3509 if (mHandlerThread != null) { 3510 mHandlerThread.quitSafely(); 3511 } 3512 } 3513 3514 /** @hide */ 3515 public void onPaused(boolean paused) { 3516 synchronized(this) { 3517 if (DEBUG) Log.d(TAG, "onPaused: " + paused); 3518 if (mStopped) { // handle as seek if we were stopped 3519 mStopped = false; 3520 mSeeking = true; 3521 scheduleNotification(NOTIFY_SEEK, 0 /* delay */); 3522 } else { 3523 mPausing = paused; // special handling if player disappeared 3524 mSeeking = false; 3525 scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */); 3526 } 3527 
} 3528 } 3529 3530 /** @hide */ 3531 public void onBuffering(boolean buffering) { 3532 synchronized (this) { 3533 if (DEBUG) Log.d(TAG, "onBuffering: " + buffering); 3534 mBuffering = buffering; 3535 scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */); 3536 } 3537 } 3538 3539 /** @hide */ 3540 public void onStopped() { 3541 synchronized(this) { 3542 if (DEBUG) Log.d(TAG, "onStopped"); 3543 mPaused = true; 3544 mStopped = true; 3545 mSeeking = false; 3546 mBuffering = false; 3547 scheduleNotification(NOTIFY_STOP, 0 /* delay */); 3548 } 3549 } 3550 3551 /** @hide */ 3552 @Override 3553 public void onSeekComplete(MediaPlayer mp) { 3554 synchronized(this) { 3555 mStopped = false; 3556 mSeeking = true; 3557 scheduleNotification(NOTIFY_SEEK, 0 /* delay */); 3558 } 3559 } 3560 3561 /** @hide */ 3562 public void onNewPlayer() { 3563 if (mRefresh) { 3564 synchronized(this) { 3565 mStopped = false; 3566 mSeeking = true; 3567 mBuffering = false; 3568 scheduleNotification(NOTIFY_SEEK, 0 /* delay */); 3569 } 3570 } 3571 } 3572 3573 private synchronized void notifySeek() { 3574 mSeeking = false; 3575 try { 3576 long timeUs = getCurrentTimeUs(true, false); 3577 if (DEBUG) Log.d(TAG, "onSeekComplete at " + timeUs); 3578 3579 for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) { 3580 if (listener == null) { 3581 break; 3582 } 3583 listener.onSeek(timeUs); 3584 } 3585 } catch (IllegalStateException e) { 3586 // we should not be there, but at least signal pause 3587 if (DEBUG) Log.d(TAG, "onSeekComplete but no player"); 3588 mPausing = true; // special handling if player disappeared 3589 notifyTimedEvent(false /* refreshTime */); 3590 } 3591 } 3592 3593 private synchronized void notifyStop() { 3594 for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) { 3595 if (listener == null) { 3596 break; 3597 } 3598 listener.onStop(); 3599 } 3600 } 3601 3602 private int registerListener(MediaTimeProvider.OnMediaTimeListener listener) { 3603 int i = 0; 3604 for 
(; i < mListeners.length; i++) { 3605 if (mListeners[i] == listener || mListeners[i] == null) { 3606 break; 3607 } 3608 } 3609 3610 // new listener 3611 if (i >= mListeners.length) { 3612 MediaTimeProvider.OnMediaTimeListener[] newListeners = 3613 new MediaTimeProvider.OnMediaTimeListener[i + 1]; 3614 long[] newTimes = new long[i + 1]; 3615 System.arraycopy(mListeners, 0, newListeners, 0, mListeners.length); 3616 System.arraycopy(mTimes, 0, newTimes, 0, mTimes.length); 3617 mListeners = newListeners; 3618 mTimes = newTimes; 3619 } 3620 3621 if (mListeners[i] == null) { 3622 mListeners[i] = listener; 3623 mTimes[i] = MediaTimeProvider.NO_TIME; 3624 } 3625 return i; 3626 } 3627 3628 public void notifyAt( 3629 long timeUs, MediaTimeProvider.OnMediaTimeListener listener) { 3630 synchronized(this) { 3631 if (DEBUG) Log.d(TAG, "notifyAt " + timeUs); 3632 mTimes[registerListener(listener)] = timeUs; 3633 scheduleNotification(NOTIFY_TIME, 0 /* delay */); 3634 } 3635 } 3636 3637 public void scheduleUpdate(MediaTimeProvider.OnMediaTimeListener listener) { 3638 synchronized(this) { 3639 if (DEBUG) Log.d(TAG, "scheduleUpdate"); 3640 int i = registerListener(listener); 3641 3642 if (!mStopped) { 3643 mTimes[i] = 0; 3644 scheduleNotification(NOTIFY_TIME, 0 /* delay */); 3645 } 3646 } 3647 } 3648 3649 public void cancelNotifications( 3650 MediaTimeProvider.OnMediaTimeListener listener) { 3651 synchronized(this) { 3652 int i = 0; 3653 for (; i < mListeners.length; i++) { 3654 if (mListeners[i] == listener) { 3655 System.arraycopy(mListeners, i + 1, 3656 mListeners, i, mListeners.length - i - 1); 3657 System.arraycopy(mTimes, i + 1, 3658 mTimes, i, mTimes.length - i - 1); 3659 mListeners[mListeners.length - 1] = null; 3660 mTimes[mTimes.length - 1] = NO_TIME; 3661 break; 3662 } else if (mListeners[i] == null) { 3663 break; 3664 } 3665 } 3666 3667 scheduleNotification(NOTIFY_TIME, 0 /* delay */); 3668 } 3669 } 3670 3671 private synchronized void notifyTimedEvent(boolean refreshTime) 
{ 3672 // figure out next callback 3673 long nowUs; 3674 try { 3675 nowUs = getCurrentTimeUs(refreshTime, true); 3676 } catch (IllegalStateException e) { 3677 // assume we paused until new player arrives 3678 mRefresh = true; 3679 mPausing = true; // this ensures that call succeeds 3680 nowUs = getCurrentTimeUs(refreshTime, true); 3681 } 3682 long nextTimeUs = nowUs; 3683 3684 if (mSeeking) { 3685 // skip timed-event notifications until seek is complete 3686 return; 3687 } 3688 3689 if (DEBUG) { 3690 StringBuilder sb = new StringBuilder(); 3691 sb.append("notifyTimedEvent(").append(mLastTimeUs).append(" -> ") 3692 .append(nowUs).append(") from {"); 3693 boolean first = true; 3694 for (long time: mTimes) { 3695 if (time == NO_TIME) { 3696 continue; 3697 } 3698 if (!first) sb.append(", "); 3699 sb.append(time); 3700 first = false; 3701 } 3702 sb.append("}"); 3703 Log.d(TAG, sb.toString()); 3704 } 3705 3706 Vector<MediaTimeProvider.OnMediaTimeListener> activatedListeners = 3707 new Vector<MediaTimeProvider.OnMediaTimeListener>(); 3708 for (int ix = 0; ix < mTimes.length; ix++) { 3709 if (mListeners[ix] == null) { 3710 break; 3711 } 3712 if (mTimes[ix] <= NO_TIME) { 3713 // ignore, unless we were stopped 3714 } else if (mTimes[ix] <= nowUs + MAX_EARLY_CALLBACK_US) { 3715 activatedListeners.add(mListeners[ix]); 3716 if (DEBUG) Log.d(TAG, "removed"); 3717 mTimes[ix] = NO_TIME; 3718 } else if (nextTimeUs == nowUs || mTimes[ix] < nextTimeUs) { 3719 nextTimeUs = mTimes[ix]; 3720 } 3721 } 3722 3723 if (nextTimeUs > nowUs && !mPaused) { 3724 // schedule callback at nextTimeUs 3725 if (DEBUG) Log.d(TAG, "scheduling for " + nextTimeUs + " and " + nowUs); 3726 scheduleNotification(NOTIFY_TIME, nextTimeUs - nowUs); 3727 } else { 3728 mEventHandler.removeMessages(NOTIFY); 3729 // no more callbacks 3730 } 3731 3732 for (MediaTimeProvider.OnMediaTimeListener listener: activatedListeners) { 3733 listener.onTimedEvent(nowUs); 3734 } 3735 } 3736 3737 private long getEstimatedTime(long 
nanoTime, boolean monotonic) { 3738 if (mPaused) { 3739 mLastReportedTime = mLastTimeUs + mTimeAdjustment; 3740 } else { 3741 long timeSinceRead = (nanoTime - mLastNanoTime) / 1000; 3742 mLastReportedTime = mLastTimeUs + timeSinceRead; 3743 if (mTimeAdjustment > 0) { 3744 long adjustment = 3745 mTimeAdjustment - timeSinceRead / TIME_ADJUSTMENT_RATE; 3746 if (adjustment <= 0) { 3747 mTimeAdjustment = 0; 3748 } else { 3749 mLastReportedTime += adjustment; 3750 } 3751 } 3752 } 3753 return mLastReportedTime; 3754 } 3755 3756 public long getCurrentTimeUs(boolean refreshTime, boolean monotonic) 3757 throws IllegalStateException { 3758 synchronized (this) { 3759 // we always refresh the time when the paused-state changes, because 3760 // we expect to have received the pause-change event delayed. 3761 if (mPaused && !refreshTime) { 3762 return mLastReportedTime; 3763 } 3764 3765 long nanoTime = System.nanoTime(); 3766 if (refreshTime || 3767 nanoTime >= mLastNanoTime + MAX_NS_WITHOUT_POSITION_CHECK) { 3768 try { 3769 mLastTimeUs = mPlayer.getCurrentPosition() * 1000L; 3770 mPaused = !mPlayer.isPlaying() || mBuffering; 3771 if (DEBUG) Log.v(TAG, (mPaused ? 
"paused" : "playing") + " at " + mLastTimeUs); 3772 } catch (IllegalStateException e) { 3773 if (mPausing) { 3774 // if we were pausing, get last estimated timestamp 3775 mPausing = false; 3776 getEstimatedTime(nanoTime, monotonic); 3777 mPaused = true; 3778 if (DEBUG) Log.d(TAG, "illegal state, but pausing: estimating at " + mLastReportedTime); 3779 return mLastReportedTime; 3780 } 3781 // TODO get time when prepared 3782 throw e; 3783 } 3784 mLastNanoTime = nanoTime; 3785 if (monotonic && mLastTimeUs < mLastReportedTime) { 3786 /* have to adjust time */ 3787 mTimeAdjustment = mLastReportedTime - mLastTimeUs; 3788 if (mTimeAdjustment > 1000000) { 3789 // schedule seeked event if time jumped significantly 3790 // TODO: do this properly by introducing an exception 3791 mStopped = false; 3792 mSeeking = true; 3793 scheduleNotification(NOTIFY_SEEK, 0 /* delay */); 3794 } 3795 } else { 3796 mTimeAdjustment = 0; 3797 } 3798 } 3799 3800 return getEstimatedTime(nanoTime, monotonic); 3801 } 3802 } 3803 3804 private class EventHandler extends Handler { 3805 public EventHandler(Looper looper) { 3806 super(looper); 3807 } 3808 3809 @Override 3810 public void handleMessage(Message msg) { 3811 if (msg.what == NOTIFY) { 3812 switch (msg.arg1) { 3813 case NOTIFY_TIME: 3814 notifyTimedEvent(false /* refreshTime */); 3815 break; 3816 case REFRESH_AND_NOTIFY_TIME: 3817 notifyTimedEvent(true /* refreshTime */); 3818 break; 3819 case NOTIFY_STOP: 3820 notifyStop(); 3821 break; 3822 case NOTIFY_SEEK: 3823 notifySeek(); 3824 break; 3825 } 3826 } 3827 } 3828 } 3829 } 3830} 3831