MediaPlayer.java revision 69c8cc2930a657d68a3b0365f6aa9e7524889c70
1/* 2 * Copyright (C) 2006 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17package android.media; 18 19import android.annotation.IntDef; 20import android.annotation.NonNull; 21import android.annotation.Nullable; 22import android.app.ActivityThread; 23import android.app.AppOpsManager; 24import android.content.ContentResolver; 25import android.content.Context; 26import android.content.res.AssetFileDescriptor; 27import android.net.Uri; 28import android.os.Handler; 29import android.os.HandlerThread; 30import android.os.IBinder; 31import android.os.Looper; 32import android.os.Message; 33import android.os.Parcel; 34import android.os.Parcelable; 35import android.os.Process; 36import android.os.PowerManager; 37import android.os.RemoteException; 38import android.os.ServiceManager; 39import android.os.SystemProperties; 40import android.provider.Settings; 41import android.system.ErrnoException; 42import android.system.OsConstants; 43import android.util.Log; 44import android.util.Pair; 45import android.view.Surface; 46import android.view.SurfaceHolder; 47import android.widget.VideoView; 48import android.graphics.SurfaceTexture; 49import android.media.AudioManager; 50import android.media.MediaFormat; 51import android.media.MediaTimeProvider; 52import android.media.PlaybackParams; 53import android.media.SubtitleController; 54import android.media.SubtitleController.Anchor; 55import android.media.SubtitleData; 56import 
android.media.SubtitleTrack.RenderingWidget; 57import android.media.SyncParams; 58 59import com.android.internal.app.IAppOpsService; 60 61import libcore.io.IoBridge; 62import libcore.io.Libcore; 63 64import java.io.ByteArrayOutputStream; 65import java.io.File; 66import java.io.FileDescriptor; 67import java.io.FileInputStream; 68import java.io.FileNotFoundException; 69import java.io.IOException; 70import java.io.InputStream; 71import java.lang.Runnable; 72import java.lang.annotation.Retention; 73import java.lang.annotation.RetentionPolicy; 74import java.net.InetSocketAddress; 75import java.util.BitSet; 76import java.util.HashSet; 77import java.util.Map; 78import java.util.Scanner; 79import java.util.Set; 80import java.util.Vector; 81import java.lang.ref.WeakReference; 82 83/** 84 * MediaPlayer class can be used to control playback 85 * of audio/video files and streams. An example on how to use the methods in 86 * this class can be found in {@link android.widget.VideoView}. 87 * 88 * <p>Topics covered here are: 89 * <ol> 90 * <li><a href="#StateDiagram">State Diagram</a> 91 * <li><a href="#Valid_and_Invalid_States">Valid and Invalid States</a> 92 * <li><a href="#Permissions">Permissions</a> 93 * <li><a href="#Callbacks">Register informational and error callbacks</a> 94 * </ol> 95 * 96 * <div class="special reference"> 97 * <h3>Developer Guides</h3> 98 * <p>For more information about how to use MediaPlayer, read the 99 * <a href="{@docRoot}guide/topics/media/mediaplayer.html">Media Playback</a> developer guide.</p> 100 * </div> 101 * 102 * <a name="StateDiagram"></a> 103 * <h3>State Diagram</h3> 104 * 105 * <p>Playback control of audio/video files and streams is managed as a state 106 * machine. The following diagram shows the life cycle and the states of a 107 * MediaPlayer object driven by the supported playback control operations. 108 * The ovals represent the states a MediaPlayer object may reside 109 * in. 
The arcs represent the playback control operations that drive the object 110 * state transition. There are two types of arcs. The arcs with a single arrow 111 * head represent synchronous method calls, while those with 112 * a double arrow head represent asynchronous method calls.</p> 113 * 114 * <p><img src="../../../images/mediaplayer_state_diagram.gif" 115 * alt="MediaPlayer State diagram" 116 * border="0" /></p> 117 * 118 * <p>From this state diagram, one can see that a MediaPlayer object has the 119 * following states:</p> 120 * <ul> 121 * <li>When a MediaPlayer object is just created using <code>new</code> or 122 * after {@link #reset()} is called, it is in the <em>Idle</em> state; and after 123 * {@link #release()} is called, it is in the <em>End</em> state. Between these 124 * two states is the life cycle of the MediaPlayer object. 125 * <ul> 126 * <li>There is a subtle but important difference between a newly constructed 127 * MediaPlayer object and the MediaPlayer object after {@link #reset()} 128 * is called. It is a programming error to invoke methods such 129 * as {@link #getCurrentPosition()}, 130 * {@link #getDuration()}, {@link #getVideoHeight()}, 131 * {@link #getVideoWidth()}, {@link #setAudioStreamType(int)}, 132 * {@link #setLooping(boolean)}, 133 * {@link #setVolume(float, float)}, {@link #pause()}, {@link #start()}, 134 * {@link #stop()}, {@link #seekTo(int)}, {@link #prepare()} or 135 * {@link #prepareAsync()} in the <em>Idle</em> state for both cases. 
If any of these 136 * methods is called right after a MediaPlayer object is constructed, 137 * the user supplied callback method OnErrorListener.onError() won't be 138 * called by the internal player engine and the object state remains 139 * unchanged; but if these methods are called right after {@link #reset()}, 140 * the user supplied callback method OnErrorListener.onError() will be 141 * invoked by the internal player engine and the object will be 142 * transfered to the <em>Error</em> state. </li> 143 * <li>It is also recommended that once 144 * a MediaPlayer object is no longer being used, call {@link #release()} immediately 145 * so that resources used by the internal player engine associated with the 146 * MediaPlayer object can be released immediately. Resource may include 147 * singleton resources such as hardware acceleration components and 148 * failure to call {@link #release()} may cause subsequent instances of 149 * MediaPlayer objects to fallback to software implementations or fail 150 * altogether. Once the MediaPlayer 151 * object is in the <em>End</em> state, it can no longer be used and 152 * there is no way to bring it back to any other state. </li> 153 * <li>Furthermore, 154 * the MediaPlayer objects created using <code>new</code> is in the 155 * <em>Idle</em> state, while those created with one 156 * of the overloaded convenient <code>create</code> methods are <em>NOT</em> 157 * in the <em>Idle</em> state. In fact, the objects are in the <em>Prepared</em> 158 * state if the creation using <code>create</code> method is successful. 159 * </li> 160 * </ul> 161 * </li> 162 * <li>In general, some playback control operation may fail due to various 163 * reasons, such as unsupported audio/video format, poorly interleaved 164 * audio/video, resolution too high, streaming timeout, and the like. 165 * Thus, error reporting and recovery is an important concern under 166 * these circumstances. 
Sometimes, due to programming errors, invoking a playback 167 * control operation in an invalid state may also occur. Under all these 168 * error conditions, the internal player engine invokes a user supplied 169 * OnErrorListener.onError() method if an OnErrorListener has been 170 * registered beforehand via 171 * {@link #setOnErrorListener(android.media.MediaPlayer.OnErrorListener)}. 172 * <ul> 173 * <li>It is important to note that once an error occurs, the 174 * MediaPlayer object enters the <em>Error</em> state (except as noted 175 * above), even if an error listener has not been registered by the application.</li> 176 * <li>In order to reuse a MediaPlayer object that is in the <em> 177 * Error</em> state and recover from the error, 178 * {@link #reset()} can be called to restore the object to its <em>Idle</em> 179 * state.</li> 180 * <li>It is good programming practice to have your application 181 * register a OnErrorListener to look out for error notifications from 182 * the internal player engine.</li> 183 * <li>IllegalStateException is 184 * thrown to prevent programming errors such as calling {@link #prepare()}, 185 * {@link #prepareAsync()}, or one of the overloaded <code>setDataSource 186 * </code> methods in an invalid state. </li> 187 * </ul> 188 * </li> 189 * <li>Calling 190 * {@link #setDataSource(FileDescriptor)}, or 191 * {@link #setDataSource(String)}, or 192 * {@link #setDataSource(Context, Uri)}, or 193 * {@link #setDataSource(FileDescriptor, long, long)}, or 194 * {@link #setDataSource(MediaDataSource)} transfers a 195 * MediaPlayer object in the <em>Idle</em> state to the 196 * <em>Initialized</em> state. 
197 * <ul> 198 * <li>An IllegalStateException is thrown if 199 * setDataSource() is called in any other state.</li> 200 * <li>It is good programming 201 * practice to always look out for <code>IllegalArgumentException</code> 202 * and <code>IOException</code> that may be thrown from the overloaded 203 * <code>setDataSource</code> methods.</li> 204 * </ul> 205 * </li> 206 * <li>A MediaPlayer object must first enter the <em>Prepared</em> state 207 * before playback can be started. 208 * <ul> 209 * <li>There are two ways (synchronous vs. 210 * asynchronous) that the <em>Prepared</em> state can be reached: 211 * either a call to {@link #prepare()} (synchronous) which 212 * transfers the object to the <em>Prepared</em> state once the method call 213 * returns, or a call to {@link #prepareAsync()} (asynchronous) which 214 * first transfers the object to the <em>Preparing</em> state after the 215 * call returns (which occurs almost right way) while the internal 216 * player engine continues working on the rest of preparation work 217 * until the preparation work completes. 
When the preparation completes or when {@link #prepare()} call returns,
 * the internal player engine then calls a user supplied callback method,
 * onPrepared() of the OnPreparedListener interface, if an
 * OnPreparedListener is registered beforehand via {@link
 * #setOnPreparedListener(android.media.MediaPlayer.OnPreparedListener)}.</li>
 * <li>It is important to note that
 * the <em>Preparing</em> state is a transient state, and the behavior
 * of calling any method with side effect while a MediaPlayer object is
 * in the <em>Preparing</em> state is undefined.</li>
 * <li>An IllegalStateException is
 * thrown if {@link #prepare()} or {@link #prepareAsync()} is called in
 * any other state.</li>
 * <li>While in the <em>Prepared</em> state, properties
 * such as audio/sound volume, screenOnWhilePlaying, looping can be
 * adjusted by invoking the corresponding set methods.</li>
 * </ul>
 * </li>
 * <li>To start the playback, {@link #start()} must be called. After
 * {@link #start()} returns successfully, the MediaPlayer object is in the
 * <em>Started</em> state. {@link #isPlaying()} can be called to test
 * whether the MediaPlayer object is in the <em>Started</em> state.
 * <ul>
 * <li>While in the <em>Started</em> state, the internal player engine calls
 * a user supplied OnBufferingUpdateListener.onBufferingUpdate() callback
 * method if an OnBufferingUpdateListener has been registered beforehand
 * via {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}.
 * This callback allows applications to keep track of the buffering status
 * while streaming audio/video.</li>
 * <li>Calling {@link #start()} has no effect
 * on a MediaPlayer object that is already in the <em>Started</em> state.</li>
 * </ul>
 * </li>
 * <li>Playback can be paused and stopped, and the current playback position
 * can be adjusted. Playback can be paused via {@link #pause()}.
When the call to
 * {@link #pause()} returns, the MediaPlayer object enters the
 * <em>Paused</em> state. Note that the transition from the <em>Started</em>
 * state to the <em>Paused</em> state and vice versa happens
 * asynchronously in the player engine. It may take some time before
 * the state is updated in calls to {@link #isPlaying()}, and it can be
 * a number of seconds in the case of streamed content.
 * <ul>
 * <li>Call {@link #start()} to resume playback for a paused
 * MediaPlayer object; the resumed playback
 * position is the same as where it was paused. When the call to
 * {@link #start()} returns, the paused MediaPlayer object goes back to
 * the <em>Started</em> state.</li>
 * <li>Calling {@link #pause()} has no effect on
 * a MediaPlayer object that is already in the <em>Paused</em> state.</li>
 * </ul>
 * </li>
 * <li>Calling {@link #stop()} stops playback and causes a
 * MediaPlayer in the <em>Started</em>, <em>Paused</em>, <em>Prepared
 * </em> or <em>PlaybackCompleted</em> state to enter the
 * <em>Stopped</em> state.
 * <ul>
 * <li>Once in the <em>Stopped</em> state, playback cannot be started
 * until {@link #prepare()} or {@link #prepareAsync()} are called to set
 * the MediaPlayer object to the <em>Prepared</em> state again.</li>
 * <li>Calling {@link #stop()} has no effect on a MediaPlayer
 * object that is already in the <em>Stopped</em> state.</li>
 * </ul>
 * </li>
 * <li>The playback position can be adjusted with a call to
 * {@link #seekTo(int)}.
 * <ul>
 * <li>Although the asynchronous {@link #seekTo(int)}
 * call returns right away, the actual seek operation may take a while to
 * finish, especially for audio/video being streamed.
When the actual 285 * seek operation completes, the internal player engine calls a user 286 * supplied OnSeekComplete.onSeekComplete() if an OnSeekCompleteListener 287 * has been registered beforehand via 288 * {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}.</li> 289 * <li>Please 290 * note that {@link #seekTo(int)} can also be called in the other states, 291 * such as <em>Prepared</em>, <em>Paused</em> and <em>PlaybackCompleted 292 * </em> state.</li> 293 * <li>Furthermore, the actual current playback position 294 * can be retrieved with a call to {@link #getCurrentPosition()}, which 295 * is helpful for applications such as a Music player that need to keep 296 * track of the playback progress.</li> 297 * </ul> 298 * </li> 299 * <li>When the playback reaches the end of stream, the playback completes. 300 * <ul> 301 * <li>If the looping mode was being set to <var>true</var>with 302 * {@link #setLooping(boolean)}, the MediaPlayer object shall remain in 303 * the <em>Started</em> state.</li> 304 * <li>If the looping mode was set to <var>false 305 * </var>, the player engine calls a user supplied callback method, 306 * OnCompletion.onCompletion(), if a OnCompletionListener is registered 307 * beforehand via {@link #setOnCompletionListener(OnCompletionListener)}. 
The invocation of the callback signals that the object is now in the <em>
 * PlaybackCompleted</em> state.</li>
 * <li>While in the <em>PlaybackCompleted</em>
 * state, calling {@link #start()} can restart the playback from the
 * beginning of the audio/video source.</li>
 * </ul>
 *
 *
 * <a name="Valid_and_Invalid_States"></a>
 * <h3>Valid and invalid states</h3>
 *
 * <table border="0" cellspacing="0" cellpadding="0">
 * <tr><td>Method Name </p></td>
 * <td>Valid States </p></td>
 * <td>Invalid States </p></td>
 * <td>Comments </p></td></tr>
 * <tr><td>attachAuxEffect </p></td>
 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
 * <td>{Idle, Error} </p></td>
 * <td>This method must be called after setDataSource.
 * Calling it does not change the object state. </p></td></tr>
 * <tr><td>getAudioSessionId </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>getCurrentPosition </p></td>
 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
 * PlaybackCompleted} </p></td>
 * <td>{Error}</p></td>
 * <td>Successful invoke of this method in a valid state does not change the
 * state. Calling this method in an invalid state transfers the object
 * to the <em>Error</em> state. </p></td></tr>
 * <tr><td>getDuration </p></td>
 * <td>{Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
 * <td>{Idle, Initialized, Error} </p></td>
 * <td>Successful invoke of this method in a valid state does not change the
 * state. Calling this method in an invalid state transfers the object
 * to the <em>Error</em> state.
</p></td></tr> 347 * <tr><td>getVideoHeight </p></td> 348 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 349 * PlaybackCompleted}</p></td> 350 * <td>{Error}</p></td> 351 * <td>Successful invoke of this method in a valid state does not change the 352 * state. Calling this method in an invalid state transfers the object 353 * to the <em>Error</em> state. </p></td></tr> 354 * <tr><td>getVideoWidth </p></td> 355 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 356 * PlaybackCompleted}</p></td> 357 * <td>{Error}</p></td> 358 * <td>Successful invoke of this method in a valid state does not change 359 * the state. Calling this method in an invalid state transfers the 360 * object to the <em>Error</em> state. </p></td></tr> 361 * <tr><td>isPlaying </p></td> 362 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 363 * PlaybackCompleted}</p></td> 364 * <td>{Error}</p></td> 365 * <td>Successful invoke of this method in a valid state does not change 366 * the state. Calling this method in an invalid state transfers the 367 * object to the <em>Error</em> state. </p></td></tr> 368 * <tr><td>pause </p></td> 369 * <td>{Started, Paused, PlaybackCompleted}</p></td> 370 * <td>{Idle, Initialized, Prepared, Stopped, Error}</p></td> 371 * <td>Successful invoke of this method in a valid state transfers the 372 * object to the <em>Paused</em> state. Calling this method in an 373 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> 374 * <tr><td>prepare </p></td> 375 * <td>{Initialized, Stopped} </p></td> 376 * <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td> 377 * <td>Successful invoke of this method in a valid state transfers the 378 * object to the <em>Prepared</em> state. 
Calling this method in an 379 * invalid state throws an IllegalStateException.</p></td></tr> 380 * <tr><td>prepareAsync </p></td> 381 * <td>{Initialized, Stopped} </p></td> 382 * <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td> 383 * <td>Successful invoke of this method in a valid state transfers the 384 * object to the <em>Preparing</em> state. Calling this method in an 385 * invalid state throws an IllegalStateException.</p></td></tr> 386 * <tr><td>release </p></td> 387 * <td>any </p></td> 388 * <td>{} </p></td> 389 * <td>After {@link #release()}, the object is no longer available. </p></td></tr> 390 * <tr><td>reset </p></td> 391 * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped, 392 * PlaybackCompleted, Error}</p></td> 393 * <td>{}</p></td> 394 * <td>After {@link #reset()}, the object is like being just created.</p></td></tr> 395 * <tr><td>seekTo </p></td> 396 * <td>{Prepared, Started, Paused, PlaybackCompleted} </p></td> 397 * <td>{Idle, Initialized, Stopped, Error}</p></td> 398 * <td>Successful invoke of this method in a valid state does not change 399 * the state. Calling this method in an invalid state transfers the 400 * object to the <em>Error</em> state. </p></td></tr> 401 * <tr><td>setAudioAttributes </p></td> 402 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 403 * PlaybackCompleted}</p></td> 404 * <td>{Error}</p></td> 405 * <td>Successful invoke of this method does not change the state. In order for the 406 * target audio attributes type to become effective, this method must be called before 407 * prepare() or prepareAsync().</p></td></tr> 408 * <tr><td>setAudioSessionId </p></td> 409 * <td>{Idle} </p></td> 410 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted, 411 * Error} </p></td> 412 * <td>This method must be called in idle state as the audio session ID must be known before 413 * calling setDataSource. Calling it does not change the object state. 
</p></td></tr> 414 * <tr><td>setAudioStreamType </p></td> 415 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 416 * PlaybackCompleted}</p></td> 417 * <td>{Error}</p></td> 418 * <td>Successful invoke of this method does not change the state. In order for the 419 * target audio stream type to become effective, this method must be called before 420 * prepare() or prepareAsync().</p></td></tr> 421 * <tr><td>setAuxEffectSendLevel </p></td> 422 * <td>any</p></td> 423 * <td>{} </p></td> 424 * <td>Calling this method does not change the object state. </p></td></tr> 425 * <tr><td>setDataSource </p></td> 426 * <td>{Idle} </p></td> 427 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted, 428 * Error} </p></td> 429 * <td>Successful invoke of this method in a valid state transfers the 430 * object to the <em>Initialized</em> state. Calling this method in an 431 * invalid state throws an IllegalStateException.</p></td></tr> 432 * <tr><td>setDisplay </p></td> 433 * <td>any </p></td> 434 * <td>{} </p></td> 435 * <td>This method can be called in any state and calling it does not change 436 * the object state. </p></td></tr> 437 * <tr><td>setSurface </p></td> 438 * <td>any </p></td> 439 * <td>{} </p></td> 440 * <td>This method can be called in any state and calling it does not change 441 * the object state. </p></td></tr> 442 * <tr><td>setVideoScalingMode </p></td> 443 * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td> 444 * <td>{Idle, Error}</p></td> 445 * <td>Successful invoke of this method does not change the state.</p></td></tr> 446 * <tr><td>setLooping </p></td> 447 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 448 * PlaybackCompleted}</p></td> 449 * <td>{Error}</p></td> 450 * <td>Successful invoke of this method in a valid state does not change 451 * the state. 
Calling this method in an
 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
 * <tr><td>isLooping </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>setOnBufferingUpdateListener </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>setOnCompletionListener </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>setOnErrorListener </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>setOnPreparedListener </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>setOnSeekCompleteListener </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>setPlaybackParams</p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state. </p></td></tr>
 * <tr><td>setScreenOnWhilePlaying </p></td>
 * <td>any </p></td>
 * <td>{} </p></td>
 * <td>This method can be called in any state and calling it does not change
 * the object state.
</p></td></tr> 493 * <tr><td>setVolume </p></td> 494 * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused, 495 * PlaybackCompleted}</p></td> 496 * <td>{Error}</p></td> 497 * <td>Successful invoke of this method does not change the state. 498 * <tr><td>setWakeMode </p></td> 499 * <td>any </p></td> 500 * <td>{} </p></td> 501 * <td>This method can be called in any state and calling it does not change 502 * the object state.</p></td></tr> 503 * <tr><td>start </p></td> 504 * <td>{Prepared, Started, Paused, PlaybackCompleted}</p></td> 505 * <td>{Idle, Initialized, Stopped, Error}</p></td> 506 * <td>Successful invoke of this method in a valid state transfers the 507 * object to the <em>Started</em> state. Calling this method in an 508 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> 509 * <tr><td>stop </p></td> 510 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 511 * <td>{Idle, Initialized, Error}</p></td> 512 * <td>Successful invoke of this method in a valid state transfers the 513 * object to the <em>Stopped</em> state. 
Calling this method in an 514 * invalid state transfers the object to the <em>Error</em> state.</p></td></tr> 515 * <tr><td>getTrackInfo </p></td> 516 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 517 * <td>{Idle, Initialized, Error}</p></td> 518 * <td>Successful invoke of this method does not change the state.</p></td></tr> 519 * <tr><td>addTimedTextSource </p></td> 520 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 521 * <td>{Idle, Initialized, Error}</p></td> 522 * <td>Successful invoke of this method does not change the state.</p></td></tr> 523 * <tr><td>selectTrack </p></td> 524 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 525 * <td>{Idle, Initialized, Error}</p></td> 526 * <td>Successful invoke of this method does not change the state.</p></td></tr> 527 * <tr><td>deselectTrack </p></td> 528 * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td> 529 * <td>{Idle, Initialized, Error}</p></td> 530 * <td>Successful invoke of this method does not change the state.</p></td></tr> 531 * 532 * </table> 533 * 534 * <a name="Permissions"></a> 535 * <h3>Permissions</h3> 536 * <p>One may need to declare a corresponding WAKE_LOCK permission {@link 537 * android.R.styleable#AndroidManifestUsesPermission <uses-permission>} 538 * element. 539 * 540 * <p>This class requires the {@link android.Manifest.permission#INTERNET} permission 541 * when used with network-based content. 542 * 543 * <a name="Callbacks"></a> 544 * <h3>Callbacks</h3> 545 * <p>Applications may want to register for informational and error 546 * events in order to be informed of some internal state update and 547 * possible runtime errors during playback or streaming. 
Registration for
 * these events is done by properly setting the appropriate listeners (via calls
 * to
 * {@link #setOnPreparedListener(OnPreparedListener)}setOnPreparedListener,
 * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}setOnVideoSizeChangedListener,
 * {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}setOnSeekCompleteListener,
 * {@link #setOnCompletionListener(OnCompletionListener)}setOnCompletionListener,
 * {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}setOnBufferingUpdateListener,
 * {@link #setOnInfoListener(OnInfoListener)}setOnInfoListener,
 * {@link #setOnErrorListener(OnErrorListener)}setOnErrorListener, etc).
 * In order to receive the respective callback
 * associated with these listeners, applications are required to create
 * MediaPlayer objects on a thread with its own Looper running (main UI
 * thread by default has a Looper running).
 *
 */
public class MediaPlayer implements SubtitleController.Listener
{
    /**
     * Constant to retrieve only the new metadata since the last
     * call.
     * // FIXME: unhide.
     * // FIXME: add link to getMetadata(boolean, boolean)
     * {@hide}
     */
    public static final boolean METADATA_UPDATE_ONLY = true;

    /**
     * Constant to retrieve all the metadata.
     * // FIXME: unhide.
     * // FIXME: add link to getMetadata(boolean, boolean)
     * {@hide}
     */
    public static final boolean METADATA_ALL = false;

    /**
     * Constant to enable the metadata filter during retrieval.
     * // FIXME: unhide.
     * // FIXME: add link to getMetadata(boolean, boolean)
     * {@hide}
     */
    public static final boolean APPLY_METADATA_FILTER = true;

    /**
     * Constant to disable the metadata filter during retrieval.
     * // FIXME: unhide.
// FIXME: add link to getMetadata(boolean, boolean)
     * {@hide}
     */
    public static final boolean BYPASS_METADATA_FILTER = false;

    // One-time setup: load the JNI glue library and run native static
    // initialization before any MediaPlayer instance can be constructed.
    static {
        System.loadLibrary("media_jni");
        native_init();
    }

    private final static String TAG = "MediaPlayer";
    // Name of the remote interface for the media player. Must be kept
    // in sync with the 2nd parameter of the IMPLEMENT_META_INTERFACE
    // macro invocation in IMediaPlayer.cpp
    private final static String IMEDIA_PLAYER = "android.media.IMediaPlayer";

    private long mNativeContext; // accessed by native methods
    private long mNativeSurfaceTexture;  // accessed by native methods
    private int mListenerContext; // accessed by native methods
    // Holder supplied via setDisplay(); set back to null when setSurface() is used.
    private SurfaceHolder mSurfaceHolder;
    // Delivers native player events on the Looper captured in the constructor
    // (creating thread's Looper, falling back to the main Looper); may be null.
    private EventHandler mEventHandler;
    // NOTE(review): presumably acquired/released by setWakeMode()/stayAwake(),
    // which are outside this chunk — confirm before relying on this.
    private PowerManager.WakeLock mWakeLock = null;
    // Whether the client asked for the screen to stay on while playing;
    // checked in setSurface() (only effective with a SurfaceHolder).
    private boolean mScreenOnWhilePlaying;
    private boolean mStayAwake;
    // App-ops service handle obtained in the constructor.
    private final IAppOpsService mAppOps;
    private int mStreamType = AudioManager.USE_DEFAULT_STREAM_TYPE;
    private int mUsage = -1; // -1 = unset; presumably filled by setAudioAttributes — TODO confirm
    private boolean mBypassInterruptionPolicy;

    /**
     * Default constructor. Consider using one of the create() methods for
     * synchronously instantiating a MediaPlayer from a Uri or resource.
     * <p>When done with the MediaPlayer, you should call {@link #release()},
     * to free the resources.
If not released, too many MediaPlayer instances may 627 * result in an exception.</p> 628 */ 629 public MediaPlayer() { 630 631 Looper looper; 632 if ((looper = Looper.myLooper()) != null) { 633 mEventHandler = new EventHandler(this, looper); 634 } else if ((looper = Looper.getMainLooper()) != null) { 635 mEventHandler = new EventHandler(this, looper); 636 } else { 637 mEventHandler = null; 638 } 639 640 mTimeProvider = new TimeProvider(this); 641 mOpenSubtitleSources = new Vector<InputStream>(); 642 IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE); 643 mAppOps = IAppOpsService.Stub.asInterface(b); 644 645 /* Native setup requires a weak reference to our object. 646 * It's easier to create it here than in C++. 647 */ 648 native_setup(new WeakReference<MediaPlayer>(this)); 649 } 650 651 /* 652 * Update the MediaPlayer SurfaceTexture. 653 * Call after setting a new display surface. 654 */ 655 private native void _setVideoSurface(Surface surface); 656 657 /* Do not change these values (starting with INVOKE_ID) without updating 658 * their counterparts in include/media/mediaplayer.h! 659 */ 660 private static final int INVOKE_ID_GET_TRACK_INFO = 1; 661 private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE = 2; 662 private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE_FD = 3; 663 private static final int INVOKE_ID_SELECT_TRACK = 4; 664 private static final int INVOKE_ID_DESELECT_TRACK = 5; 665 private static final int INVOKE_ID_SET_VIDEO_SCALE_MODE = 6; 666 private static final int INVOKE_ID_GET_SELECTED_TRACK = 7; 667 668 /** 669 * Create a request parcel which can be routed to the native media 670 * player using {@link #invoke(Parcel, Parcel)}. The Parcel 671 * returned has the proper InterfaceToken set. The caller should 672 * not overwrite that token, i.e it can only append data to the 673 * Parcel. 674 * 675 * @return A parcel suitable to hold a request for the native 676 * player. 
677 * {@hide} 678 */ 679 public Parcel newRequest() { 680 Parcel parcel = Parcel.obtain(); 681 parcel.writeInterfaceToken(IMEDIA_PLAYER); 682 return parcel; 683 } 684 685 /** 686 * Invoke a generic method on the native player using opaque 687 * parcels for the request and reply. Both payloads' format is a 688 * convention between the java caller and the native player. 689 * Must be called after setDataSource to make sure a native player 690 * exists. On failure, a RuntimeException is thrown. 691 * 692 * @param request Parcel with the data for the extension. The 693 * caller must use {@link #newRequest()} to get one. 694 * 695 * @param reply Output parcel with the data returned by the 696 * native player. 697 * {@hide} 698 */ 699 public void invoke(Parcel request, Parcel reply) { 700 int retcode = native_invoke(request, reply); 701 reply.setDataPosition(0); 702 if (retcode != 0) { 703 throw new RuntimeException("failure code: " + retcode); 704 } 705 } 706 707 /** 708 * Sets the {@link SurfaceHolder} to use for displaying the video 709 * portion of the media. 710 * 711 * Either a surface holder or surface must be set if a display or video sink 712 * is needed. Not calling this method or {@link #setSurface(Surface)} 713 * when playing back a video will result in only the audio track being played. 714 * A null surface holder or surface will result in only the audio track being 715 * played. 716 * 717 * @param sh the SurfaceHolder to use for video display 718 */ 719 public void setDisplay(SurfaceHolder sh) { 720 mSurfaceHolder = sh; 721 Surface surface; 722 if (sh != null) { 723 surface = sh.getSurface(); 724 } else { 725 surface = null; 726 } 727 _setVideoSurface(surface); 728 updateSurfaceScreenOn(); 729 } 730 731 /** 732 * Sets the {@link Surface} to be used as the sink for the video portion of 733 * the media. This is similar to {@link #setDisplay(SurfaceHolder)}, but 734 * does not support {@link #setScreenOnWhilePlaying(boolean)}. 
 Setting a
     * Surface will un-set any Surface or SurfaceHolder that was previously set.
     * A null surface will result in only the audio track being played.
     *
     * If the Surface sends frames to a {@link SurfaceTexture}, the timestamps
     * returned from {@link SurfaceTexture#getTimestamp()} will have an
     * unspecified zero point.  These timestamps cannot be directly compared
     * between different media sources, different instances of the same media
     * source, or multiple runs of the same program.  The timestamp is normally
     * monotonically increasing and is unaffected by time-of-day adjustments,
     * but it is reset when the position is set.
     *
     * @param surface The {@link Surface} to be used for the video portion of
     * the media.
     */
    public void setSurface(Surface surface) {
        // setScreenOnWhilePlaying() only works through a SurfaceHolder; warn
        // when it was requested but a bare Surface is used instead.
        if (mScreenOnWhilePlaying && surface != null) {
            Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective for Surface");
        }
        mSurfaceHolder = null; // a bare Surface replaces any previous holder
        _setVideoSurface(surface);
        updateSurfaceScreenOn();
    }

    /* Do not change these video scaling mode values below without updating
     * their counterparts in system/window.h! Please do not forget to update
     * {@link #isVideoScalingModeSupported} when new video scaling modes
     * are added.
     */
    /**
     * Specifies a video scaling mode. The content is stretched to the
     * surface rendering area. When the surface has the same aspect ratio
     * as the content, the aspect ratio of the content is maintained;
     * otherwise, the aspect ratio of the content is not maintained when video
     * is being rendered. Unlike {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING},
     * there is no content cropping with this video scaling mode.
     */
    public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT = 1;

    /**
     * Specifies a video scaling mode. The content is scaled, maintaining
     * its aspect ratio. The whole surface area is always used.
When the 776 * aspect ratio of the content is the same as the surface, no content 777 * is cropped; otherwise, content is cropped to fit the surface. 778 */ 779 public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2; 780 /** 781 * Sets video scaling mode. To make the target video scaling mode 782 * effective during playback, this method must be called after 783 * data source is set. If not called, the default video 784 * scaling mode is {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT}. 785 * 786 * <p> The supported video scaling modes are: 787 * <ul> 788 * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT} 789 * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING} 790 * </ul> 791 * 792 * @param mode target video scaling mode. Must be one of the supported 793 * video scaling modes; otherwise, IllegalArgumentException will be thrown. 794 * 795 * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT 796 * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING 797 */ 798 public void setVideoScalingMode(int mode) { 799 if (!isVideoScalingModeSupported(mode)) { 800 final String msg = "Scaling mode " + mode + " is not supported"; 801 throw new IllegalArgumentException(msg); 802 } 803 Parcel request = Parcel.obtain(); 804 Parcel reply = Parcel.obtain(); 805 try { 806 request.writeInterfaceToken(IMEDIA_PLAYER); 807 request.writeInt(INVOKE_ID_SET_VIDEO_SCALE_MODE); 808 request.writeInt(mode); 809 invoke(request, reply); 810 } finally { 811 request.recycle(); 812 reply.recycle(); 813 } 814 } 815 816 /** 817 * Convenience method to create a MediaPlayer for a given Uri. 818 * On success, {@link #prepare()} will already have been called and must not be called again. 819 * <p>When done with the MediaPlayer, you should call {@link #release()}, 820 * to free the resources. 
If not released, too many MediaPlayer instances will 821 * result in an exception.</p> 822 * <p>Note that since {@link #prepare()} is called automatically in this method, 823 * you cannot change the audio stream type (see {@link #setAudioStreamType(int)}), audio 824 * session ID (see {@link #setAudioSessionId(int)}) or audio attributes 825 * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p> 826 * 827 * @param context the Context to use 828 * @param uri the Uri from which to get the datasource 829 * @return a MediaPlayer object, or null if creation failed 830 */ 831 public static MediaPlayer create(Context context, Uri uri) { 832 return create (context, uri, null); 833 } 834 835 /** 836 * Convenience method to create a MediaPlayer for a given Uri. 837 * On success, {@link #prepare()} will already have been called and must not be called again. 838 * <p>When done with the MediaPlayer, you should call {@link #release()}, 839 * to free the resources. If not released, too many MediaPlayer instances will 840 * result in an exception.</p> 841 * <p>Note that since {@link #prepare()} is called automatically in this method, 842 * you cannot change the audio stream type (see {@link #setAudioStreamType(int)}), audio 843 * session ID (see {@link #setAudioSessionId(int)}) or audio attributes 844 * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p> 845 * 846 * @param context the Context to use 847 * @param uri the Uri from which to get the datasource 848 * @param holder the SurfaceHolder to use for displaying the video 849 * @return a MediaPlayer object, or null if creation failed 850 */ 851 public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder) { 852 int s = AudioSystem.newAudioSessionId(); 853 return create(context, uri, holder, null, s > 0 ? 
s : 0); 854 } 855 856 /** 857 * Same factory method as {@link #create(Context, Uri, SurfaceHolder)} but that lets you specify 858 * the audio attributes and session ID to be used by the new MediaPlayer instance. 859 * @param context the Context to use 860 * @param uri the Uri from which to get the datasource 861 * @param holder the SurfaceHolder to use for displaying the video, may be null. 862 * @param audioAttributes the {@link AudioAttributes} to be used by the media player. 863 * @param audioSessionId the audio session ID to be used by the media player, 864 * see {@link AudioManager#generateAudioSessionId()} to obtain a new session. 865 * @return a MediaPlayer object, or null if creation failed 866 */ 867 public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder, 868 AudioAttributes audioAttributes, int audioSessionId) { 869 870 try { 871 MediaPlayer mp = new MediaPlayer(); 872 final AudioAttributes aa = audioAttributes != null ? audioAttributes : 873 new AudioAttributes.Builder().build(); 874 mp.setAudioAttributes(aa); 875 mp.setAudioSessionId(audioSessionId); 876 mp.setDataSource(context, uri); 877 if (holder != null) { 878 mp.setDisplay(holder); 879 } 880 mp.prepare(); 881 return mp; 882 } catch (IOException ex) { 883 Log.d(TAG, "create failed:", ex); 884 // fall through 885 } catch (IllegalArgumentException ex) { 886 Log.d(TAG, "create failed:", ex); 887 // fall through 888 } catch (SecurityException ex) { 889 Log.d(TAG, "create failed:", ex); 890 // fall through 891 } 892 893 return null; 894 } 895 896 // Note no convenience method to create a MediaPlayer with SurfaceTexture sink. 897 898 /** 899 * Convenience method to create a MediaPlayer for a given resource id. 900 * On success, {@link #prepare()} will already have been called and must not be called again. 901 * <p>When done with the MediaPlayer, you should call {@link #release()}, 902 * to free the resources. 
If not released, too many MediaPlayer instances will 903 * result in an exception.</p> 904 * <p>Note that since {@link #prepare()} is called automatically in this method, 905 * you cannot change the audio stream type (see {@link #setAudioStreamType(int)}), audio 906 * session ID (see {@link #setAudioSessionId(int)}) or audio attributes 907 * (see {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p> 908 * 909 * @param context the Context to use 910 * @param resid the raw resource id (<var>R.raw.<something></var>) for 911 * the resource to use as the datasource 912 * @return a MediaPlayer object, or null if creation failed 913 */ 914 public static MediaPlayer create(Context context, int resid) { 915 int s = AudioSystem.newAudioSessionId(); 916 return create(context, resid, null, s > 0 ? s : 0); 917 } 918 919 /** 920 * Same factory method as {@link #create(Context, int)} but that lets you specify the audio 921 * attributes and session ID to be used by the new MediaPlayer instance. 922 * @param context the Context to use 923 * @param resid the raw resource id (<var>R.raw.<something></var>) for 924 * the resource to use as the datasource 925 * @param audioAttributes the {@link AudioAttributes} to be used by the media player. 926 * @param audioSessionId the audio session ID to be used by the media player, 927 * see {@link AudioManager#generateAudioSessionId()} to obtain a new session. 928 * @return a MediaPlayer object, or null if creation failed 929 */ 930 public static MediaPlayer create(Context context, int resid, 931 AudioAttributes audioAttributes, int audioSessionId) { 932 try { 933 AssetFileDescriptor afd = context.getResources().openRawResourceFd(resid); 934 if (afd == null) return null; 935 936 MediaPlayer mp = new MediaPlayer(); 937 938 final AudioAttributes aa = audioAttributes != null ? 
audioAttributes : 939 new AudioAttributes.Builder().build(); 940 mp.setAudioAttributes(aa); 941 mp.setAudioSessionId(audioSessionId); 942 943 mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength()); 944 afd.close(); 945 mp.prepare(); 946 return mp; 947 } catch (IOException ex) { 948 Log.d(TAG, "create failed:", ex); 949 // fall through 950 } catch (IllegalArgumentException ex) { 951 Log.d(TAG, "create failed:", ex); 952 // fall through 953 } catch (SecurityException ex) { 954 Log.d(TAG, "create failed:", ex); 955 // fall through 956 } 957 return null; 958 } 959 960 /** 961 * Sets the data source as a content Uri. 962 * 963 * @param context the Context to use when resolving the Uri 964 * @param uri the Content URI of the data you want to play 965 * @throws IllegalStateException if it is called in an invalid state 966 */ 967 public void setDataSource(Context context, Uri uri) 968 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException { 969 setDataSource(context, uri, null); 970 } 971 972 /** 973 * Sets the data source as a content Uri. 974 * 975 * @param context the Context to use when resolving the Uri 976 * @param uri the Content URI of the data you want to play 977 * @param headers the headers to be sent together with the request for the data 978 * Note that the cross domain redirection is allowed by default, but that can be 979 * changed with key/value pairs through the headers parameter with 980 * "android-allow-cross-domain-redirect" as the key and "0" or "1" as the value 981 * to disallow or allow cross domain redirection. 
 * @throws IllegalStateException if it is called in an invalid state
     */
    public void setDataSource(Context context, Uri uri, Map<String, String> headers)
            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
        final String scheme = uri.getScheme();
        // file:// URIs bypass the ContentResolver entirely.
        if (ContentResolver.SCHEME_FILE.equals(scheme)) {
            setDataSource(uri.getPath());
            return;
        } else if (ContentResolver.SCHEME_CONTENT.equals(scheme)
                && Settings.AUTHORITY.equals(uri.getAuthority())) {
            // Redirect ringtones to go directly to underlying provider
            uri = RingtoneManager.getActualDefaultRingtoneUri(context,
                    RingtoneManager.getDefaultType(uri));
            if (uri == null) {
                throw new FileNotFoundException("Failed to resolve default ringtone");
            }
        }

        // First try to open the Uri locally via the ContentResolver; on
        // failure fall through to the string/headers path below so the
        // media server can try to open it itself.
        AssetFileDescriptor fd = null;
        try {
            ContentResolver resolver = context.getContentResolver();
            fd = resolver.openAssetFileDescriptor(uri, "r");
            if (fd == null) {
                return;
            }
            // Note: using getDeclaredLength so that our behavior is the same
            // as previous versions when the content provider is returning
            // a full file.
            if (fd.getDeclaredLength() < 0) {
                setDataSource(fd.getFileDescriptor());
            } else {
                setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getDeclaredLength());
            }
            return;
        } catch (SecurityException | IOException ex) {
            Log.w(TAG, "Couldn't open file on client side; trying server side: " + ex);
        } finally {
            if (fd != null) {
                fd.close();
            }
        }

        setDataSource(uri.toString(), headers);
    }

    /**
     * Sets the data source (file-path or http/rtsp URL) to use.
1029 * 1030 * @param path the path of the file, or the http/rtsp URL of the stream you want to play 1031 * @throws IllegalStateException if it is called in an invalid state 1032 * 1033 * <p>When <code>path</code> refers to a local file, the file may actually be opened by a 1034 * process other than the calling application. This implies that the pathname 1035 * should be an absolute path (as any other process runs with unspecified current working 1036 * directory), and that the pathname should reference a world-readable file. 1037 * As an alternative, the application could first open the file for reading, 1038 * and then use the file descriptor form {@link #setDataSource(FileDescriptor)}. 1039 */ 1040 public void setDataSource(String path) 1041 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException { 1042 setDataSource(path, null, null); 1043 } 1044 1045 /** 1046 * Sets the data source (file-path or http/rtsp URL) to use. 1047 * 1048 * @param path the path of the file, or the http/rtsp URL of the stream you want to play 1049 * @param headers the headers associated with the http request for the stream you want to play 1050 * @throws IllegalStateException if it is called in an invalid state 1051 * @hide pending API council 1052 */ 1053 public void setDataSource(String path, Map<String, String> headers) 1054 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException 1055 { 1056 String[] keys = null; 1057 String[] values = null; 1058 1059 if (headers != null) { 1060 keys = new String[headers.size()]; 1061 values = new String[headers.size()]; 1062 1063 int i = 0; 1064 for (Map.Entry<String, String> entry: headers.entrySet()) { 1065 keys[i] = entry.getKey(); 1066 values[i] = entry.getValue(); 1067 ++i; 1068 } 1069 } 1070 setDataSource(path, keys, values); 1071 } 1072 1073 private void setDataSource(String path, String[] keys, String[] values) 1074 throws IOException, IllegalArgumentException, SecurityException, 
IllegalStateException { 1075 final Uri uri = Uri.parse(path); 1076 final String scheme = uri.getScheme(); 1077 if ("file".equals(scheme)) { 1078 path = uri.getPath(); 1079 } else if (scheme != null) { 1080 // handle non-file sources 1081 nativeSetDataSource( 1082 MediaHTTPService.createHttpServiceBinderIfNecessary(path), 1083 path, 1084 keys, 1085 values); 1086 return; 1087 } 1088 1089 final File file = new File(path); 1090 if (file.exists()) { 1091 FileInputStream is = new FileInputStream(file); 1092 FileDescriptor fd = is.getFD(); 1093 setDataSource(fd); 1094 is.close(); 1095 } else { 1096 throw new IOException("setDataSource failed."); 1097 } 1098 } 1099 1100 private native void nativeSetDataSource( 1101 IBinder httpServiceBinder, String path, String[] keys, String[] values) 1102 throws IOException, IllegalArgumentException, SecurityException, IllegalStateException; 1103 1104 /** 1105 * Sets the data source (FileDescriptor) to use. It is the caller's responsibility 1106 * to close the file descriptor. It is safe to do so as soon as this call returns. 1107 * 1108 * @param fd the FileDescriptor for the file you want to play 1109 * @throws IllegalStateException if it is called in an invalid state 1110 */ 1111 public void setDataSource(FileDescriptor fd) 1112 throws IOException, IllegalArgumentException, IllegalStateException { 1113 // intentionally less than LONG_MAX 1114 setDataSource(fd, 0, 0x7ffffffffffffffL); 1115 } 1116 1117 /** 1118 * Sets the data source (FileDescriptor) to use. The FileDescriptor must be 1119 * seekable (N.B. a LocalSocket is not seekable). It is the caller's responsibility 1120 * to close the file descriptor. It is safe to do so as soon as this call returns. 
 *
     * @param fd the FileDescriptor for the file you want to play
     * @param offset the offset into the file where the data to be played starts, in bytes
     * @param length the length in bytes of the data to be played
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void setDataSource(FileDescriptor fd, long offset, long length)
            throws IOException, IllegalArgumentException, IllegalStateException {
        _setDataSource(fd, offset, length);
    }

    private native void _setDataSource(FileDescriptor fd, long offset, long length)
            throws IOException, IllegalArgumentException, IllegalStateException;

    /**
     * Sets the data source (MediaDataSource) to use.
     *
     * @param dataSource the MediaDataSource for the media you want to play
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void setDataSource(MediaDataSource dataSource)
            throws IllegalArgumentException, IllegalStateException {
        _setDataSource(dataSource);
    }

    private native void _setDataSource(MediaDataSource dataSource)
            throws IllegalArgumentException, IllegalStateException;

    /**
     * Prepares the player for playback, synchronously.
     *
     * After setting the datasource and the display surface, you need to either
     * call prepare() or prepareAsync(). For files, it is OK to call prepare(),
     * which blocks until MediaPlayer is ready for playback.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void prepare() throws IOException, IllegalStateException {
        _prepare();
        // Embedded subtitle tracks are only known after the native player has
        // parsed the source, so scan for them once prepare completes.
        scanInternalSubtitleTracks();
    }

    private native void _prepare() throws IOException, IllegalStateException;

    /**
     * Prepares the player for playback, asynchronously.
     *
     * After setting the datasource and the display surface, you need to either
     * call prepare() or prepareAsync().
 For streams, you should call prepareAsync(),
     * which returns immediately, rather than blocking until enough data has been
     * buffered.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public native void prepareAsync() throws IllegalStateException;

    /**
     * Starts or resumes playback. If playback had previously been paused,
     * playback will continue from where it was paused. If playback had
     * been stopped, or never started before, playback will start at the
     * beginning.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void start() throws IllegalStateException {
        // When app-ops policy denies audio for this app, play muted instead
        // of failing.
        if (isRestricted()) {
            _setVolume(0, 0);
        }
        stayAwake(true);
        _start();
    }

    private native void _start() throws IllegalStateException;

    // Returns true when app-ops disallows OP_PLAY_AUDIO for this player's
    // usage (or legacy stream type). A binder failure is treated as allowed.
    private boolean isRestricted() {
        if (mBypassInterruptionPolicy) {
            return false;
        }
        try {
            final int usage = mUsage != -1 ? mUsage
                    : AudioAttributes.usageForLegacyStreamType(getAudioStreamType());
            final int mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO, usage,
                    Process.myUid(), ActivityThread.currentPackageName());
            return mode != AppOpsManager.MODE_ALLOWED;
        } catch (RemoteException e) {
            return false;
        }
    }

    // Lazily caches the stream type reported by the native player.
    private int getAudioStreamType() {
        if (mStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
            mStreamType = _getAudioStreamType();
        }
        return mStreamType;
    }

    private native int _getAudioStreamType() throws IllegalStateException;

    /**
     * Stops playback after playback has been stopped or paused.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
 */
    public void stop() throws IllegalStateException {
        // Release any wake lock before stopping so the device can sleep.
        stayAwake(false);
        _stop();
    }

    private native void _stop() throws IllegalStateException;

    /**
     * Pauses playback. Call start() to resume.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    public void pause() throws IllegalStateException {
        // Release any wake lock while paused; start() re-acquires it.
        stayAwake(false);
        _pause();
    }

    private native void _pause() throws IllegalStateException;

    /**
     * Set the low-level power management behavior for this MediaPlayer.  This
     * can be used when the MediaPlayer is not playing through a SurfaceHolder
     * set with {@link #setDisplay(SurfaceHolder)} and thus can use the
     * high-level {@link #setScreenOnWhilePlaying(boolean)} feature.
     *
     * <p>This function has the MediaPlayer access the low-level power manager
     * service to control the device's power usage while playing is occurring.
     * The parameter is a combination of {@link android.os.PowerManager} wake flags.
     * Use of this method requires {@link android.Manifest.permission#WAKE_LOCK}
     * permission.
     * By default, no attempt is made to keep the device awake during playback.
1257 * 1258 * @param context the Context to use 1259 * @param mode the power/wake mode to set 1260 * @see android.os.PowerManager 1261 */ 1262 public void setWakeMode(Context context, int mode) { 1263 boolean washeld = false; 1264 1265 /* Disable persistant wakelocks in media player based on property */ 1266 if (SystemProperties.getBoolean("audio.offload.ignore_setawake", false) == true) { 1267 Log.w(TAG, "IGNORING setWakeMode " + mode); 1268 return; 1269 } 1270 1271 if (mWakeLock != null) { 1272 if (mWakeLock.isHeld()) { 1273 washeld = true; 1274 mWakeLock.release(); 1275 } 1276 mWakeLock = null; 1277 } 1278 1279 PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE); 1280 mWakeLock = pm.newWakeLock(mode|PowerManager.ON_AFTER_RELEASE, MediaPlayer.class.getName()); 1281 mWakeLock.setReferenceCounted(false); 1282 if (washeld) { 1283 mWakeLock.acquire(); 1284 } 1285 } 1286 1287 /** 1288 * Control whether we should use the attached SurfaceHolder to keep the 1289 * screen on while video playback is occurring. This is the preferred 1290 * method over {@link #setWakeMode} where possible, since it doesn't 1291 * require that the application have permission for low-level wake lock 1292 * access. 1293 * 1294 * @param screenOn Supply true to keep the screen on, false to allow it 1295 * to turn off. 
1296 */ 1297 public void setScreenOnWhilePlaying(boolean screenOn) { 1298 if (mScreenOnWhilePlaying != screenOn) { 1299 if (screenOn && mSurfaceHolder == null) { 1300 Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective without a SurfaceHolder"); 1301 } 1302 mScreenOnWhilePlaying = screenOn; 1303 updateSurfaceScreenOn(); 1304 } 1305 } 1306 1307 private void stayAwake(boolean awake) { 1308 if (mWakeLock != null) { 1309 if (awake && !mWakeLock.isHeld()) { 1310 mWakeLock.acquire(); 1311 } else if (!awake && mWakeLock.isHeld()) { 1312 mWakeLock.release(); 1313 } 1314 } 1315 mStayAwake = awake; 1316 updateSurfaceScreenOn(); 1317 } 1318 1319 private void updateSurfaceScreenOn() { 1320 if (mSurfaceHolder != null) { 1321 mSurfaceHolder.setKeepScreenOn(mScreenOnWhilePlaying && mStayAwake); 1322 } 1323 } 1324 1325 /** 1326 * Returns the width of the video. 1327 * 1328 * @return the width of the video, or 0 if there is no video, 1329 * no display surface was set, or the width has not been determined 1330 * yet. The OnVideoSizeChangedListener can be registered via 1331 * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)} 1332 * to provide a notification when the width is available. 1333 */ 1334 public native int getVideoWidth(); 1335 1336 /** 1337 * Returns the height of the video. 1338 * 1339 * @return the height of the video, or 0 if there is no video, 1340 * no display surface was set, or the height has not been determined 1341 * yet. The OnVideoSizeChangedListener can be registered via 1342 * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)} 1343 * to provide a notification when the height is available. 1344 */ 1345 public native int getVideoHeight(); 1346 1347 /** 1348 * Checks whether the MediaPlayer is playing. 1349 * 1350 * @return true if currently playing, false otherwise 1351 * @throws IllegalStateException if the internal player engine has not been 1352 * initialized or has been released. 
 */
    public native boolean isPlaying();

    /**
     * Change playback speed of audio by resampling the audio.
     * <p>
     * Specifies resampling as audio mode for variable rate playback, i.e.,
     * resample the waveform based on the requested playback rate to get
     * a new waveform, and play back the new waveform at the original sampling
     * frequency.
     * When rate is larger than 1.0, pitch becomes higher.
     * When rate is smaller than 1.0, pitch becomes lower.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;

    /**
     * Change playback speed of audio without changing its pitch.
     * <p>
     * Specifies time stretching as audio mode for variable rate playback.
     * Time stretching changes the duration of the audio samples without
     * affecting its pitch.
     * <p>
     * This mode is only supported for a limited range of playback speed factors,
     * e.g. between 1/2x and 2x.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;

    /**
     * Change playback speed of audio without changing its pitch, and
     * possibly mute audio if time stretching is not supported for the playback
     * speed.
     * <p>
     * Try to keep audio pitch when changing the playback rate, but allow the
     * system to determine how to change audio playback if the rate is out
     * of range.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;

    /** @hide */
    @IntDef(
        value = {
            PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
            PLAYBACK_RATE_AUDIO_MODE_STRETCH,
            PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface PlaybackRateAudioMode {}

    /**
     * Sets playback rate and audio mode.
     *
     * @param rate the ratio between desired playback rate and normal one.
     * @param audioMode audio playback mode.
Must be one of the supported 1412 * audio modes. 1413 * 1414 * @throws IllegalStateException if the internal player engine has not been 1415 * initialized. 1416 * @throws IllegalArgumentException if audioMode is not supported. 1417 * 1418 * @hide 1419 */ 1420 @NonNull 1421 public PlaybackParams easyPlaybackParams(float rate, @PlaybackRateAudioMode int audioMode) { 1422 PlaybackParams params = new PlaybackParams(); 1423 params.allowDefaults(); 1424 switch (audioMode) { 1425 case PLAYBACK_RATE_AUDIO_MODE_DEFAULT: 1426 params.setSpeed(rate).setPitch(1.0f); 1427 break; 1428 case PLAYBACK_RATE_AUDIO_MODE_STRETCH: 1429 params.setSpeed(rate).setPitch(1.0f) 1430 .setAudioFallbackMode(params.AUDIO_FALLBACK_MODE_FAIL); 1431 break; 1432 case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE: 1433 params.setSpeed(rate).setPitch(rate); 1434 break; 1435 default: 1436 final String msg = "Audio playback mode " + audioMode + " is not supported"; 1437 throw new IllegalArgumentException(msg); 1438 } 1439 return params; 1440 } 1441 1442 /** 1443 * Sets playback rate using {@link PlaybackParams}. 1444 * 1445 * @param params the playback params. 1446 * 1447 * @throws IllegalStateException if the internal player engine has not been 1448 * initialized. 1449 * @throws IllegalArgumentException if params is not supported. 1450 */ 1451 public native void setPlaybackParams(@NonNull PlaybackParams params); 1452 1453 /** 1454 * Gets the playback params, containing the current playback rate. 1455 * 1456 * @return the playback params. 1457 * @throws IllegalStateException if the internal player engine has not been 1458 * initialized. 1459 */ 1460 @NonNull 1461 public native PlaybackParams getPlaybackParams(); 1462 1463 /** 1464 * Sets A/V sync mode. 1465 * 1466 * @param params the A/V sync params to apply 1467 * 1468 * @throws IllegalStateException if the internal player engine has not been 1469 * initialized. 1470 * @throws IllegalArgumentException if params are not supported. 
 */
    public native void setSyncParams(@NonNull SyncParams params);

    /**
     * Gets the A/V sync mode.
     *
     * @return the A/V sync params
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    @NonNull
    public native SyncParams getSyncParams();

    /**
     * Seeks to specified time position.
     *
     * @param msec the offset in milliseconds from the start to seek to
     * @throws IllegalStateException if the internal player engine has not been
     * initialized
     */
    public native void seekTo(int msec) throws IllegalStateException;

    /**
     * Get current playback position as a {@link MediaTimestamp}.
     * <p>
     * The MediaTimestamp represents how the media time correlates to the system time in
     * a linear fashion using an anchor and a clock rate. During regular playback, the media
     * time moves fairly constantly (though the anchor frame may be rebased to a current
     * system time, the linear correlation stays steady). Therefore, this method does not
     * need to be called often.
     * <p>
     * To help users get current playback position, this method always anchors the timestamp
     * to the current {@link System#nanoTime system time}, so
     * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
     *
     * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
     *         is available, e.g. because the media player has not been initialized.
     *
     * @see MediaTimestamp
     */
    @Nullable
    public MediaTimestamp getTimestamp()
    {
        try {
            // TODO: get the timestamp from native side
            // Position is ms -> us; clock rate is the playback speed, or 0
            // when paused/stopped.
            return new MediaTimestamp(
                    getCurrentPosition() * 1000L,
                    System.nanoTime(),
                    isPlaying() ? getPlaybackParams().getSpeed() : 0.f);
        } catch (IllegalStateException e) {
            return null;
        }
    }

    /**
     * Gets the current playback position.
     *
     * @return the current position in milliseconds
     */
    public native int getCurrentPosition();

    /**
     * Gets the duration of the file.
     *
     * @return the duration in milliseconds, if no duration is available
     *         (for example, if streaming live content), -1 is returned.
     */
    public native int getDuration();

    /**
     * Gets the media metadata.
     *
     * @param update_only controls whether the full set of available
     * metadata is returned or just the set that changed since the
     * last call. See {@see #METADATA_UPDATE_ONLY} and {@see
     * #METADATA_ALL}.
     *
     * @param apply_filter if true only metadata that matches the
     * filter is returned. See {@see #APPLY_METADATA_FILTER} and {@see
     * #BYPASS_METADATA_FILTER}.
     *
     * @return The metadata, possibly empty. null if an error occured.
     // FIXME: unhide.
     * {@hide}
     */
    public Metadata getMetadata(final boolean update_only,
                                final boolean apply_filter) {
        Parcel reply = Parcel.obtain();
        Metadata data = new Metadata();

        if (!native_getMetadata(update_only, apply_filter, reply)) {
            reply.recycle();
            return null;
        }

        // Metadata takes over the parcel, don't recycle it unless
        // there is an error.
        if (!data.parse(reply)) {
            reply.recycle();
            return null;
        }
        return data;
    }

    /**
     * Set a filter for the metadata update notification and update
     * retrieval. The caller provides 2 set of metadata keys, allowed
     * and blocked. The blocked set always takes precedence over the
     * allowed one.
     * Metadata.MATCH_ALL and Metadata.MATCH_NONE are 2 sets available as
     * shorthands to allow/block all or no metadata.
     *
     * By default, there is no filter set.
     *
     * @param allow Is the set of metadata the client is interested
     *              in receiving new notifications for.
     * @param block Is the set of metadata the client is not interested
     *              in receiving new notifications for.
     * @return The call status code.
     *
     // FIXME: unhide.
     * {@hide}
     */
    public int setMetadataFilter(Set<Integer> allow, Set<Integer> block) {
        // Do our serialization manually instead of calling
        // Parcel.writeArray since the sets are made of the same type
        // we avoid paying the price of calling writeValue (used by
        // writeArray) which burns an extra int per element to encode
        // the type.
        Parcel request = newRequest();

        // The parcel starts already with an interface token. There
        // are 2 filters. Each one starts with a 4bytes number to
        // store the len followed by a number of int (4 bytes as well)
        // representing the metadata type.
        int capacity = request.dataSize() + 4 * (1 + allow.size() + 1 + block.size());

        // Pre-size the parcel so the writes below don't trigger reallocation.
        if (request.dataCapacity() < capacity) {
            request.setDataCapacity(capacity);
        }

        // Wire format: [allow count][allow types...][block count][block types...]
        request.writeInt(allow.size());
        for(Integer t: allow) {
            request.writeInt(t);
        }
        request.writeInt(block.size());
        for(Integer t: block) {
            request.writeInt(t);
        }
        // NOTE(review): `request` is not recycled here; whether newRequest() /
        // native_setMetadataFilter takes ownership of the Parcel is not visible
        // in this region of the file — confirm before adding a recycle().
        return native_setMetadataFilter(request);
    }

    /**
     * Set the MediaPlayer to start when this MediaPlayer finishes playback
     * (i.e. reaches the end of the stream).
     * The media framework will attempt to transition from this player to
     * the next as seamlessly as possible. The next player can be set at
     * any time before completion. The next player must be prepared by the
     * app, and the application should not call start() on it.
     * The next MediaPlayer must be different from 'this'. An exception
     * will be thrown if next == this.
1633 * The application may call setNextMediaPlayer(null) to indicate no 1634 * next player should be started at the end of playback. 1635 * If the current player is looping, it will keep looping and the next 1636 * player will not be started. 1637 * 1638 * @param next the player to start after this one completes playback. 1639 * 1640 */ 1641 public native void setNextMediaPlayer(MediaPlayer next); 1642 1643 /** 1644 * Releases resources associated with this MediaPlayer object. 1645 * It is considered good practice to call this method when you're 1646 * done using the MediaPlayer. In particular, whenever an Activity 1647 * of an application is paused (its onPause() method is called), 1648 * or stopped (its onStop() method is called), this method should be 1649 * invoked to release the MediaPlayer object, unless the application 1650 * has a special need to keep the object around. In addition to 1651 * unnecessary resources (such as memory and instances of codecs) 1652 * being held, failure to call this method immediately if a 1653 * MediaPlayer object is no longer needed may also lead to 1654 * continuous battery consumption for mobile devices, and playback 1655 * failure for other applications if no multiple instances of the 1656 * same codec are supported on a device. Even if multiple instances 1657 * of the same codec are supported, some performance degradation 1658 * may be expected when unnecessary multiple instances are used 1659 * at the same time. 
1660 */ 1661 public void release() { 1662 stayAwake(false); 1663 updateSurfaceScreenOn(); 1664 mOnPreparedListener = null; 1665 mOnBufferingUpdateListener = null; 1666 mOnCompletionListener = null; 1667 mOnSeekCompleteListener = null; 1668 mOnErrorListener = null; 1669 mOnInfoListener = null; 1670 mOnVideoSizeChangedListener = null; 1671 mOnTimedTextListener = null; 1672 if (mTimeProvider != null) { 1673 mTimeProvider.close(); 1674 mTimeProvider = null; 1675 } 1676 mOnSubtitleDataListener = null; 1677 _release(); 1678 } 1679 1680 private native void _release(); 1681 1682 /** 1683 * Resets the MediaPlayer to its uninitialized state. After calling 1684 * this method, you will have to initialize it again by setting the 1685 * data source and calling prepare(). 1686 */ 1687 public void reset() { 1688 mSelectedSubtitleTrackIndex = -1; 1689 synchronized(mOpenSubtitleSources) { 1690 for (final InputStream is: mOpenSubtitleSources) { 1691 try { 1692 is.close(); 1693 } catch (IOException e) { 1694 } 1695 } 1696 mOpenSubtitleSources.clear(); 1697 } 1698 if (mSubtitleController != null) { 1699 mSubtitleController.reset(); 1700 } 1701 if (mTimeProvider != null) { 1702 mTimeProvider.close(); 1703 mTimeProvider = null; 1704 } 1705 1706 stayAwake(false); 1707 _reset(); 1708 // make sure none of the listeners get called anymore 1709 if (mEventHandler != null) { 1710 mEventHandler.removeCallbacksAndMessages(null); 1711 } 1712 1713 synchronized (mIndexTrackPairs) { 1714 mIndexTrackPairs.clear(); 1715 mInbandTrackIndices.clear(); 1716 }; 1717 } 1718 1719 private native void _reset(); 1720 1721 /** 1722 * Sets the audio stream type for this MediaPlayer. See {@link AudioManager} 1723 * for a list of stream types. Must call this method before prepare() or 1724 * prepareAsync() in order for the target stream type to become effective 1725 * thereafter. 
1726 * 1727 * @param streamtype the audio stream type 1728 * @see android.media.AudioManager 1729 */ 1730 public void setAudioStreamType(int streamtype) { 1731 _setAudioStreamType(streamtype); 1732 mStreamType = streamtype; 1733 } 1734 1735 private native void _setAudioStreamType(int streamtype); 1736 1737 // Keep KEY_PARAMETER_* in sync with include/media/mediaplayer.h 1738 private final static int KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400; 1739 /** 1740 * Sets the parameter indicated by key. 1741 * @param key key indicates the parameter to be set. 1742 * @param value value of the parameter to be set. 1743 * @return true if the parameter is set successfully, false otherwise 1744 * {@hide} 1745 */ 1746 private native boolean setParameter(int key, Parcel value); 1747 1748 /** 1749 * Sets the audio attributes for this MediaPlayer. 1750 * See {@link AudioAttributes} for how to build and configure an instance of this class. 1751 * You must call this method before {@link #prepare()} or {@link #prepareAsync()} in order 1752 * for the audio attributes to become effective thereafter. 1753 * @param attributes a non-null set of audio attributes 1754 */ 1755 public void setAudioAttributes(AudioAttributes attributes) throws IllegalArgumentException { 1756 if (attributes == null) { 1757 final String msg = "Cannot set AudioAttributes to null"; 1758 throw new IllegalArgumentException(msg); 1759 } 1760 mUsage = attributes.getUsage(); 1761 mBypassInterruptionPolicy = (attributes.getAllFlags() 1762 & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0; 1763 Parcel pattributes = Parcel.obtain(); 1764 attributes.writeToParcel(pattributes, AudioAttributes.FLATTEN_TAGS); 1765 setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, pattributes); 1766 pattributes.recycle(); 1767 } 1768 1769 /** 1770 * Sets the player to be looping or non-looping. 
     *
     * @param looping whether to loop or not
     */
    public native void setLooping(boolean looping);

    /**
     * Checks whether the MediaPlayer is looping or non-looping.
     *
     * @return true if the MediaPlayer is currently looping, false otherwise
     */
    public native boolean isLooping();

    /**
     * Sets the volume on this player.
     * This API is recommended for balancing the output of audio streams
     * within an application. Unless you are writing an application to
     * control user settings, this API should be used in preference to
     * {@link AudioManager#setStreamVolume(int, int, int)} which sets the volume of ALL streams of
     * a particular type. Note that the passed volume values are raw scalars in range 0.0 to 1.0.
     * UI controls should be scaled logarithmically.
     *
     * @param leftVolume left volume scalar
     * @param rightVolume right volume scalar
     */
    /*
     * FIXME: Merge this into javadoc comment above when setVolume(float) is not @hide.
     * The single parameter form below is preferred if the channel volumes don't need
     * to be set independently.
     */
    public void setVolume(float leftVolume, float rightVolume) {
        // Silently ignored when the app is restricted (isRestricted() is
        // defined elsewhere in this class — presumably an AppOps check;
        // confirm). Callers are not notified of the no-op.
        if (isRestricted()) {
            return;
        }
        _setVolume(leftVolume, rightVolume);
    }

    private native void _setVolume(float leftVolume, float rightVolume);

    /**
     * Similar, excepts sets volume of all channels to same value.
     * @hide
     */
    public void setVolume(float volume) {
        setVolume(volume, volume);
    }

    /**
     * Sets the audio session ID.
     *
     * @param sessionId the audio session ID.
     * The audio session ID is a system wide unique identifier for the audio stream played by
     * this MediaPlayer instance.
     * The primary use of the audio session ID is to associate audio effects to a particular
     * instance of MediaPlayer: if an audio session ID is provided when creating an audio effect,
     * this effect will be applied only to the audio content of media players within the same
     * audio session and not to the output mix.
     * When created, a MediaPlayer instance automatically generates its own audio session ID.
     * However, it is possible to force this player to be part of an already existing audio session
     * by calling this method.
     * This method must be called before one of the overloaded <code> setDataSource </code> methods.
     * @throws IllegalStateException if it is called in an invalid state
     */
    public native void setAudioSessionId(int sessionId) throws IllegalArgumentException, IllegalStateException;

    /**
     * Returns the audio session ID.
     *
     * @return the audio session ID. {@see #setAudioSessionId(int)}
     * Note that the audio session ID is 0 only if a problem occured when the MediaPlayer was contructed.
     */
    public native int getAudioSessionId();

    /**
     * Attaches an auxiliary effect to the player. A typical auxiliary effect is a reverberation
     * effect which can be applied on any sound source that directs a certain amount of its
     * energy to this effect. This amount is defined by setAuxEffectSendLevel().
     * See {@link #setAuxEffectSendLevel(float)}.
     * <p>After creating an auxiliary effect (e.g.
     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling this method
     * to attach the player to the effect.
     * <p>To detach the effect from the player, call this method with a null effect id.
     * <p>This method must be called after one of the overloaded <code> setDataSource </code>
     * methods.
     * @param effectId system wide unique id of the effect to attach
     */
    public native void attachAuxEffect(int effectId);


    /**
     * Sets the send level of the player to the attached auxiliary effect.
     * See {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0.
     * <p>By default the send level is 0, so even if an effect is attached to the player
     * this method must be called for the effect to be applied.
     * <p>Note that the passed level value is a raw scalar. UI controls should be scaled
     * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
     * so an appropriate conversion from linear UI input x to level is:
     * x == 0 -> level = 0
     * 0 < x <= R -> level = 10^(72*(x-R)/20/R)
     * @param level send level scalar
     */
    public void setAuxEffectSendLevel(float level) {
        // Silently ignored when the app is restricted, mirroring setVolume().
        if (isRestricted()) {
            return;
        }
        _setAuxEffectSendLevel(level);
    }

    private native void _setAuxEffectSendLevel(float level);

    /*
     * @param request Parcel destinated to the media player. The
     *                Interface token must be set to the IMediaPlayer
     *                one to be routed correctly through the system.
     * @param reply[out] Parcel that will contain the reply.
     * @return The status code.
     */
    private native final int native_invoke(Parcel request, Parcel reply);


    /*
     * @param update_only If true fetch only the set of metadata that have
     *                    changed since the last invocation of getMetadata.
     *                    The set is built using the unfiltered
     *                    notifications the native player sent to the
     *                    MediaPlayerService during that period of
     *                    time. If false, all the metadatas are considered.
     * @param apply_filter If true, once the metadata set has been built based on
     *                     the value update_only, the current filter is applied.
     * @param reply[out] On return contains the serialized
     *                   metadata.
     *                   Valid only if the call was successful.
     * @return The status code.
     */
    private native final boolean native_getMetadata(boolean update_only,
                                                    boolean apply_filter,
                                                    Parcel reply);

    /*
     * @param request Parcel with the 2 serialized lists of allowed
     *                metadata types followed by the one to be
     *                dropped. Each list starts with an integer
     *                indicating the number of metadata type elements.
     * @return The status code.
     */
    private native final int native_setMetadataFilter(Parcel request);

    private static native final void native_init();
    private native final void native_setup(Object mediaplayer_this);
    private native final void native_finalize();

    /**
     * Class for MediaPlayer to return each audio/video/subtitle track's metadata.
     *
     * @see android.media.MediaPlayer#getTrackInfo
     */
    static public class TrackInfo implements Parcelable {
        /**
         * Gets the track type.
         * @return TrackType which indicates if the track is video, audio, timed text.
         */
        public int getTrackType() {
            return mTrackType;
        }

        /**
         * Gets the language code of the track.
         * @return a language code in either way of ISO-639-1 or ISO-639-2.
         * When the language is unknown or could not be determined,
         * ISO-639-2 language code, "und", is returned.
         */
        public String getLanguage() {
            String language = mFormat.getString(MediaFormat.KEY_LANGUAGE);
            return language == null ? "und" : language;
        }

        /**
         * Gets the {@link MediaFormat} of the track. If the format is
         * unknown or could not be determined, null is returned.
         */
        public MediaFormat getFormat() {
            // Only timed-text and subtitle tracks expose their format; other
            // track types return null (see the parcel constructor below, which
            // only fills in mime and language).
            if (mTrackType == MEDIA_TRACK_TYPE_TIMEDTEXT
                    || mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
                return mFormat;
            }
            return null;
        }

        public static final int MEDIA_TRACK_TYPE_UNKNOWN = 0;
        public static final int MEDIA_TRACK_TYPE_VIDEO = 1;
        public static final int MEDIA_TRACK_TYPE_AUDIO = 2;
        public static final int MEDIA_TRACK_TYPE_TIMEDTEXT = 3;
        public static final int MEDIA_TRACK_TYPE_SUBTITLE = 4;
        public static final int MEDIA_TRACK_TYPE_METADATA = 5;

        final int mTrackType;
        final MediaFormat mFormat;

        // Parcel layout here is produced by the native player; keep in sync
        // with the native side.
        TrackInfo(Parcel in) {
            mTrackType = in.readInt();
            // TODO: parcel in the full MediaFormat; currently we are using createSubtitleFormat
            // even for audio/video tracks, meaning we only set the mime and language.
            String mime = in.readString();
            String language = in.readString();
            mFormat = MediaFormat.createSubtitleFormat(mime, language);

            if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
                mFormat.setInteger(MediaFormat.KEY_IS_AUTOSELECT, in.readInt());
                mFormat.setInteger(MediaFormat.KEY_IS_DEFAULT, in.readInt());
                mFormat.setInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE, in.readInt());
            }
        }

        /** @hide */
        TrackInfo(int type, MediaFormat format) {
            mTrackType = type;
            mFormat = format;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public int describeContents() {
            return 0;
        }

        /**
         * {@inheritDoc}
         */
        // NOTE(review): this writes [type, language, (mime, ...) for subtitle
        // only], while the parcel constructor above reads [type, mime,
        // language, ...] for every track type — the layouts do not match.
        // The read path is fed by the native player, so this asymmetry may be
        // intentional; confirm before relying on round-tripping a TrackInfo
        // through writeToParcel/createFromParcel.
        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeInt(mTrackType);
            dest.writeString(getLanguage());

            if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
                dest.writeString(mFormat.getString(MediaFormat.KEY_MIME));
                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_AUTOSELECT));
                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_DEFAULT));
                dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE));
            }
        }

        @Override
        public String toString() {
            StringBuilder out = new StringBuilder(128);
            out.append(getClass().getName());
            out.append('{');
            switch (mTrackType) {
            case MEDIA_TRACK_TYPE_VIDEO:
                out.append("VIDEO");
                break;
            case MEDIA_TRACK_TYPE_AUDIO:
                out.append("AUDIO");
                break;
            case MEDIA_TRACK_TYPE_TIMEDTEXT:
                out.append("TIMEDTEXT");
                break;
            case MEDIA_TRACK_TYPE_SUBTITLE:
                out.append("SUBTITLE");
                break;
            default:
                out.append("UNKNOWN");
                break;
            }
            out.append(", " + mFormat.toString());
            out.append("}");
            return out.toString();
        }

        /**
         * Used to read a TrackInfo from a Parcel.
         */
        static final Parcelable.Creator<TrackInfo> CREATOR
                = new Parcelable.Creator<TrackInfo>() {
                    @Override
                    public TrackInfo createFromParcel(Parcel in) {
                        return new TrackInfo(in);
                    }

                    @Override
                    public TrackInfo[] newArray(int size) {
                        return new TrackInfo[size];
                    }
                };

    };

    // We would like domain specific classes with more informative names than the `first` and `second`
    // in generic Pair, but we would also like to avoid creating new/trivial classes. As a compromise
    // we document the meanings of `first` and `second` here:
    //
    // Pair.first - inband track index; non-null iff representing an inband track.
    // Pair.second - a SubtitleTrack registered with mSubtitleController; non-null iff representing
    //               an inband subtitle track or any out-of-band track (subtitle or timedtext).
    private Vector<Pair<Integer, SubtitleTrack>> mIndexTrackPairs = new Vector<>();
    private BitSet mInbandTrackIndices = new BitSet();

    /**
     * Returns an array of track information.
     *
     * @return Array of track info. The total number of tracks is the array length.
     * Must be called again if an external timed text source has been added after any of the
     * addTimedTextSource methods are called.
     * @throws IllegalStateException if it is called in an invalid state.
     */
    public TrackInfo[] getTrackInfo() throws IllegalStateException {
        TrackInfo trackInfo[] = getInbandTrackInfo();
        // add out-of-band tracks
        synchronized (mIndexTrackPairs) {
            // Merge: entries with a non-null index are inband (looked up in the
            // native track list); the rest are out-of-band subtitle/timedtext
            // tracks registered locally.
            TrackInfo allTrackInfo[] = new TrackInfo[mIndexTrackPairs.size()];
            for (int i = 0; i < allTrackInfo.length; i++) {
                Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
                if (p.first != null) {
                    // inband track
                    allTrackInfo[i] = trackInfo[p.first];
                } else {
                    SubtitleTrack track = p.second;
                    allTrackInfo[i] = new TrackInfo(track.getTrackType(), track.getFormat());
                }
            }
            return allTrackInfo;
        }
    }

    // Queries the native player for its track list over the binder invoke()
    // channel; both parcels are always recycled.
    private TrackInfo[] getInbandTrackInfo() throws IllegalStateException {
        Parcel request = Parcel.obtain();
        Parcel reply = Parcel.obtain();
        try {
            request.writeInterfaceToken(IMEDIA_PLAYER);
            request.writeInt(INVOKE_ID_GET_TRACK_INFO);
            invoke(request, reply);
            TrackInfo trackInfo[] = reply.createTypedArray(TrackInfo.CREATOR);
            return trackInfo;
        } finally {
            request.recycle();
            reply.recycle();
        }
    }

    /* Do not change these values without updating their counterparts
     * in include/media/stagefright/MediaDefs.h and media/libstagefright/MediaDefs.cpp!
     */
    /**
     * MIME type for SubRip (SRT) container. Used in addTimedTextSource APIs.
     */
    public static final String MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";

    /**
     * MIME type for WebVTT subtitle data.
     * @hide
     */
    public static final String MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";

    /**
     * MIME type for CEA-608 closed caption data.
2126 * @hide 2127 */ 2128 public static final String MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608"; 2129 2130 /** 2131 * MIME type for CEA-708 closed caption data. 2132 * @hide 2133 */ 2134 public static final String MEDIA_MIMETYPE_TEXT_CEA_708 = "text/cea-708"; 2135 2136 /* 2137 * A helper function to check if the mime type is supported by media framework. 2138 */ 2139 private static boolean availableMimeTypeForExternalSource(String mimeType) { 2140 if (MEDIA_MIMETYPE_TEXT_SUBRIP.equals(mimeType)) { 2141 return true; 2142 } 2143 return false; 2144 } 2145 2146 private SubtitleController mSubtitleController; 2147 2148 /** @hide */ 2149 public void setSubtitleAnchor( 2150 SubtitleController controller, 2151 SubtitleController.Anchor anchor) { 2152 // TODO: create SubtitleController in MediaPlayer 2153 mSubtitleController = controller; 2154 mSubtitleController.setAnchor(anchor); 2155 } 2156 2157 /** 2158 * The private version of setSubtitleAnchor is used internally to set mSubtitleController if 2159 * necessary when clients don't provide their own SubtitleControllers using the public version 2160 * {@link #setSubtitleAnchor(SubtitleController, Anchor)} (e.g. {@link VideoView} provides one). 
     */
    private synchronized void setSubtitleAnchor() {
        if (mSubtitleController == null) {
            // The SubtitleController is constructed on a short-lived helper
            // thread which is joined before returning, so the field is
            // guaranteed to be set (or the join interrupted) when this method
            // exits.
            final HandlerThread thread = new HandlerThread("SetSubtitleAnchorThread");
            thread.start();
            Handler handler = new Handler(thread.getLooper());
            handler.post(new Runnable() {
                @Override
                public void run() {
                    Context context = ActivityThread.currentApplication();
                    mSubtitleController = new SubtitleController(context, mTimeProvider, MediaPlayer.this);
                    mSubtitleController.setAnchor(new Anchor() {
                        @Override
                        public void setSubtitleWidget(RenderingWidget subtitleWidget) {
                            // No widget: this anchor only supplies a looper.
                        }

                        @Override
                        public Looper getSubtitleLooper() {
                            return Looper.getMainLooper();
                        }
                    });
                    thread.getLooper().quitSafely();
                }
            });
            try {
                thread.join();
            } catch (InterruptedException e) {
                // Re-assert the interrupt for callers higher up the stack.
                Thread.currentThread().interrupt();
                Log.w(TAG, "failed to join SetSubtitleAnchorThread");
            }
        }
    }

    private int mSelectedSubtitleTrackIndex = -1;
    private Vector<InputStream> mOpenSubtitleSources;

    // Routes inband subtitle data to the SubtitleTrack that owns the
    // corresponding inband index.
    private OnSubtitleDataListener mSubtitleDataListener = new OnSubtitleDataListener() {
        @Override
        public void onSubtitleData(MediaPlayer mp, SubtitleData data) {
            int index = data.getTrackIndex();
            synchronized (mIndexTrackPairs) {
                for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
                    if (p.first != null && p.first == index && p.second != null) {
                        // inband subtitle track that owns data
                        SubtitleTrack track = p.second;
                        track.onData(data);
                    }
                }
            }
        }
    };

    /** @hide */
    @Override
    public void onSubtitleTrackSelected(SubtitleTrack track) {
        // Deselect the previously selected inband track, if any. The
        // IllegalStateException is deliberately ignored: the player may have
        // been reset/released, in which case there is nothing to deselect.
        if (mSelectedSubtitleTrackIndex >= 0) {
            try {
                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, false);
            } catch (IllegalStateException e) {
            }
            mSelectedSubtitleTrackIndex = -1;
        }
        setOnSubtitleDataListener(null);
        if (track == null) {
            return;
        }

        // If the newly selected track is inband, remember its index so the
        // data listener can route packets to it.
        synchronized (mIndexTrackPairs) {
            for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
                if (p.first != null && p.second == track) {
                    // inband subtitle track that is selected
                    mSelectedSubtitleTrackIndex = p.first;
                    break;
                }
            }
        }

        if (mSelectedSubtitleTrackIndex >= 0) {
            try {
                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, true);
            } catch (IllegalStateException e) {
            }
            setOnSubtitleDataListener(mSubtitleDataListener);
        }
        // no need to select out-of-band tracks
    }

    /** @hide */
    public void addSubtitleSource(InputStream is, MediaFormat format)
            throws IllegalStateException
    {
        final InputStream fIs = is;
        final MediaFormat fFormat = format;

        if (is != null) {
            // Ensure all input streams are closed.  It is also a handy
            // way to implement timeouts in the future.
            synchronized(mOpenSubtitleSources) {
                mOpenSubtitleSources.add(is);
            }
        } else {
            Log.w(TAG, "addSubtitleSource called with null InputStream");
        }

        // process each subtitle in its own thread
        final HandlerThread thread = new HandlerThread("SubtitleReadThread",
              Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
        thread.start();
        Handler handler = new Handler(thread.getLooper());
        handler.post(new Runnable() {
            private int addTrack() {
                if (fIs == null || mSubtitleController == null) {
                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
                }

                SubtitleTrack track = mSubtitleController.addTrack(fFormat);
                if (track == null) {
                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
                }

                // TODO: do the conversion in the subtitle track
                // Reads the entire stream as UTF-8 text ("\\A" delimiter makes
                // next() return everything).
                Scanner scanner = new Scanner(fIs, "UTF-8");
                String contents = scanner.useDelimiter("\\A").next();
                synchronized(mOpenSubtitleSources) {
                    mOpenSubtitleSources.remove(fIs);
                }
                scanner.close();
                synchronized (mIndexTrackPairs) {
                    mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
                }
                // NOTE(review): getBytes() re-encodes with the platform default
                // charset after decoding as UTF-8 above; benign where the
                // default is UTF-8, but consider getBytes("UTF-8") — confirm
                // what SubtitleTrack.onData expects.
                track.onData(contents.getBytes(), true /* eos */, ~0 /* runID: keep forever */);
                return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
            }

            public void run() {
                int res = addTrack();
                if (mEventHandler != null) {
                    Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
                    mEventHandler.sendMessage(m);
                }
                thread.getLooper().quitSafely();
            }
        });
    }

    private void scanInternalSubtitleTracks() {
        // Lazily create a default subtitle anchor if the client never set one.
        if (mSubtitleController == null) {
            Log.d(TAG, "setSubtitleAnchor in MediaPlayer");
            setSubtitleAnchor();
        }

        populateInbandTracks();

        if (mSubtitleController != null) {
            mSubtitleController.selectDefaultTrack();
        }
    }

    private void populateInbandTracks() {
        TrackInfo[] tracks = getInbandTrackInfo();
        synchronized (mIndexTrackPairs) {
            for (int i = 0; i < tracks.length; i++) {
                // Skip inband tracks we have already registered.
                if (mInbandTrackIndices.get(i)) {
                    continue;
                } else {
                    mInbandTrackIndices.set(i);
                }

                // newly appeared inband track
                if (tracks[i].getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
                    SubtitleTrack track = mSubtitleController.addTrack(
                            tracks[i].getFormat());
                    mIndexTrackPairs.add(Pair.create(i, track));
                } else {
                    mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(i, null));
                }
            }
        }
    }

    /* TODO: Limit the total number of external timed text source to a reasonable number.
     */
    /**
     * Adds an external timed text source file.
     *
     * Currently supported format is SubRip with the file extension .srt, case insensitive.
     * Note that a single external timed text source may contain multiple tracks in it.
2348 * One can find the total number of available tracks using {@link #getTrackInfo()} to see what 2349 * additional tracks become available after this method call. 2350 * 2351 * @param path The file path of external timed text source file. 2352 * @param mimeType The mime type of the file. Must be one of the mime types listed above. 2353 * @throws IOException if the file cannot be accessed or is corrupted. 2354 * @throws IllegalArgumentException if the mimeType is not supported. 2355 * @throws IllegalStateException if called in an invalid state. 2356 */ 2357 public void addTimedTextSource(String path, String mimeType) 2358 throws IOException, IllegalArgumentException, IllegalStateException { 2359 if (!availableMimeTypeForExternalSource(mimeType)) { 2360 final String msg = "Illegal mimeType for timed text source: " + mimeType; 2361 throw new IllegalArgumentException(msg); 2362 } 2363 2364 File file = new File(path); 2365 if (file.exists()) { 2366 FileInputStream is = new FileInputStream(file); 2367 FileDescriptor fd = is.getFD(); 2368 addTimedTextSource(fd, mimeType); 2369 is.close(); 2370 } else { 2371 // We do not support the case where the path is not a file. 2372 throw new IOException(path); 2373 } 2374 } 2375 2376 /** 2377 * Adds an external timed text source file (Uri). 2378 * 2379 * Currently supported format is SubRip with the file extension .srt, case insensitive. 2380 * Note that a single external timed text source may contain multiple tracks in it. 2381 * One can find the total number of available tracks using {@link #getTrackInfo()} to see what 2382 * additional tracks become available after this method call. 2383 * 2384 * @param context the Context to use when resolving the Uri 2385 * @param uri the Content URI of the data you want to play 2386 * @param mimeType The mime type of the file. Must be one of the mime types listed above. 2387 * @throws IOException if the file cannot be accessed or is corrupted. 
2388 * @throws IllegalArgumentException if the mimeType is not supported. 2389 * @throws IllegalStateException if called in an invalid state. 2390 */ 2391 public void addTimedTextSource(Context context, Uri uri, String mimeType) 2392 throws IOException, IllegalArgumentException, IllegalStateException { 2393 String scheme = uri.getScheme(); 2394 if(scheme == null || scheme.equals("file")) { 2395 addTimedTextSource(uri.getPath(), mimeType); 2396 return; 2397 } 2398 2399 AssetFileDescriptor fd = null; 2400 try { 2401 ContentResolver resolver = context.getContentResolver(); 2402 fd = resolver.openAssetFileDescriptor(uri, "r"); 2403 if (fd == null) { 2404 return; 2405 } 2406 addTimedTextSource(fd.getFileDescriptor(), mimeType); 2407 return; 2408 } catch (SecurityException ex) { 2409 } catch (IOException ex) { 2410 } finally { 2411 if (fd != null) { 2412 fd.close(); 2413 } 2414 } 2415 } 2416 2417 /** 2418 * Adds an external timed text source file (FileDescriptor). 2419 * 2420 * It is the caller's responsibility to close the file descriptor. 2421 * It is safe to do so as soon as this call returns. 2422 * 2423 * Currently supported format is SubRip. Note that a single external timed text source may 2424 * contain multiple tracks in it. One can find the total number of available tracks 2425 * using {@link #getTrackInfo()} to see what additional tracks become available 2426 * after this method call. 2427 * 2428 * @param fd the FileDescriptor for the file you want to play 2429 * @param mimeType The mime type of the file. Must be one of the mime types listed above. 2430 * @throws IllegalArgumentException if the mimeType is not supported. 2431 * @throws IllegalStateException if called in an invalid state. 
2432 */ 2433 public void addTimedTextSource(FileDescriptor fd, String mimeType) 2434 throws IllegalArgumentException, IllegalStateException { 2435 // intentionally less than LONG_MAX 2436 addTimedTextSource(fd, 0, 0x7ffffffffffffffL, mimeType); 2437 } 2438 2439 /** 2440 * Adds an external timed text file (FileDescriptor). 2441 * 2442 * It is the caller's responsibility to close the file descriptor. 2443 * It is safe to do so as soon as this call returns. 2444 * 2445 * Currently supported format is SubRip. Note that a single external timed text source may 2446 * contain multiple tracks in it. One can find the total number of available tracks 2447 * using {@link #getTrackInfo()} to see what additional tracks become available 2448 * after this method call. 2449 * 2450 * @param fd the FileDescriptor for the file you want to play 2451 * @param offset the offset into the file where the data to be played starts, in bytes 2452 * @param length the length in bytes of the data to be played 2453 * @param mime The mime type of the file. Must be one of the mime types listed above. 2454 * @throws IllegalArgumentException if the mimeType is not supported. 2455 * @throws IllegalStateException if called in an invalid state. 2456 */ 2457 public void addTimedTextSource(FileDescriptor fd, long offset, long length, String mime) 2458 throws IllegalArgumentException, IllegalStateException { 2459 if (!availableMimeTypeForExternalSource(mime)) { 2460 throw new IllegalArgumentException("Illegal mimeType for timed text source: " + mime); 2461 } 2462 2463 FileDescriptor fd2; 2464 try { 2465 fd2 = Libcore.os.dup(fd); 2466 } catch (ErrnoException ex) { 2467 Log.e(TAG, ex.getMessage(), ex); 2468 throw new RuntimeException(ex); 2469 } 2470 2471 final MediaFormat fFormat = new MediaFormat(); 2472 fFormat.setString(MediaFormat.KEY_MIME, mime); 2473 fFormat.setInteger(MediaFormat.KEY_IS_TIMED_TEXT, 1); 2474 2475 // A MediaPlayer created by a VideoView should already have its mSubtitleController set. 
2476 if (mSubtitleController == null) { 2477 setSubtitleAnchor(); 2478 } 2479 2480 if (!mSubtitleController.hasRendererFor(fFormat)) { 2481 // test and add not atomic 2482 Context context = ActivityThread.currentApplication(); 2483 mSubtitleController.registerRenderer(new SRTRenderer(context, mEventHandler)); 2484 } 2485 final SubtitleTrack track = mSubtitleController.addTrack(fFormat); 2486 synchronized (mIndexTrackPairs) { 2487 mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track)); 2488 } 2489 2490 final FileDescriptor fd3 = fd2; 2491 final long offset2 = offset; 2492 final long length2 = length; 2493 final HandlerThread thread = new HandlerThread( 2494 "TimedTextReadThread", 2495 Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE); 2496 thread.start(); 2497 Handler handler = new Handler(thread.getLooper()); 2498 handler.post(new Runnable() { 2499 private int addTrack() { 2500 InputStream is = null; 2501 final ByteArrayOutputStream bos = new ByteArrayOutputStream(); 2502 try { 2503 Libcore.os.lseek(fd3, offset2, OsConstants.SEEK_SET); 2504 byte[] buffer = new byte[4096]; 2505 for (long total = 0; total < length2;) { 2506 int bytesToRead = (int) Math.min(buffer.length, length2 - total); 2507 int bytes = IoBridge.read(fd3, buffer, 0, bytesToRead); 2508 if (bytes < 0) { 2509 break; 2510 } else { 2511 bos.write(buffer, 0, bytes); 2512 total += bytes; 2513 } 2514 } 2515 track.onData(bos.toByteArray(), true /* eos */, ~0 /* runID: keep forever */); 2516 return MEDIA_INFO_EXTERNAL_METADATA_UPDATE; 2517 } catch (Exception e) { 2518 Log.e(TAG, e.getMessage(), e); 2519 return MEDIA_INFO_TIMED_TEXT_ERROR; 2520 } finally { 2521 if (is != null) { 2522 try { 2523 is.close(); 2524 } catch (IOException e) { 2525 Log.e(TAG, e.getMessage(), e); 2526 } 2527 } 2528 } 2529 } 2530 2531 public void run() { 2532 int res = addTrack(); 2533 if (mEventHandler != null) { 2534 Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null); 2535 
mEventHandler.sendMessage(m); 2536 } 2537 thread.getLooper().quitSafely(); 2538 } 2539 }); 2540 } 2541 2542 /** 2543 * Returns the index of the audio, video, or subtitle track currently selected for playback, 2544 * The return value is an index into the array returned by {@link #getTrackInfo()}, and can 2545 * be used in calls to {@link #selectTrack(int)} or {@link #deselectTrack(int)}. 2546 * 2547 * @param trackType should be one of {@link TrackInfo#MEDIA_TRACK_TYPE_VIDEO}, 2548 * {@link TrackInfo#MEDIA_TRACK_TYPE_AUDIO}, or 2549 * {@link TrackInfo#MEDIA_TRACK_TYPE_SUBTITLE} 2550 * @return index of the audio, video, or subtitle track currently selected for playback; 2551 * a negative integer is returned when there is no selected track for {@code trackType} or 2552 * when {@code trackType} is not one of audio, video, or subtitle. 2553 * @throws IllegalStateException if called after {@link #release()} 2554 * 2555 * @see #getTrackInfo() 2556 * @see #selectTrack(int) 2557 * @see #deselectTrack(int) 2558 */ 2559 public int getSelectedTrack(int trackType) throws IllegalStateException { 2560 if (mSubtitleController != null 2561 && (trackType == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE 2562 || trackType == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT)) { 2563 SubtitleTrack subtitleTrack = mSubtitleController.getSelectedTrack(); 2564 if (subtitleTrack != null) { 2565 synchronized (mIndexTrackPairs) { 2566 for (int i = 0; i < mIndexTrackPairs.size(); i++) { 2567 Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i); 2568 if (p.second == subtitleTrack && subtitleTrack.getTrackType() == trackType) { 2569 return i; 2570 } 2571 } 2572 } 2573 } 2574 } 2575 2576 Parcel request = Parcel.obtain(); 2577 Parcel reply = Parcel.obtain(); 2578 try { 2579 request.writeInterfaceToken(IMEDIA_PLAYER); 2580 request.writeInt(INVOKE_ID_GET_SELECTED_TRACK); 2581 request.writeInt(trackType); 2582 invoke(request, reply); 2583 int inbandTrackIndex = reply.readInt(); 2584 synchronized (mIndexTrackPairs) { 
2585 for (int i = 0; i < mIndexTrackPairs.size(); i++) { 2586 Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i); 2587 if (p.first != null && p.first == inbandTrackIndex) { 2588 return i; 2589 } 2590 } 2591 } 2592 return -1; 2593 } finally { 2594 request.recycle(); 2595 reply.recycle(); 2596 } 2597 } 2598 2599 /** 2600 * Selects a track. 2601 * <p> 2602 * If a MediaPlayer is in invalid state, it throws an IllegalStateException exception. 2603 * If a MediaPlayer is in <em>Started</em> state, the selected track is presented immediately. 2604 * If a MediaPlayer is not in Started state, it just marks the track to be played. 2605 * </p> 2606 * <p> 2607 * In any valid state, if it is called multiple times on the same type of track (ie. Video, 2608 * Audio, Timed Text), the most recent one will be chosen. 2609 * </p> 2610 * <p> 2611 * The first audio and video tracks are selected by default if available, even though 2612 * this method is not called. However, no timed text track will be selected until 2613 * this function is called. 2614 * </p> 2615 * <p> 2616 * Currently, only timed text tracks or audio tracks can be selected via this method. 2617 * In addition, the support for selecting an audio track at runtime is pretty limited 2618 * in that an audio track can only be selected in the <em>Prepared</em> state. 2619 * </p> 2620 * @param index the index of the track to be selected. The valid range of the index 2621 * is 0..total number of track - 1. The total number of tracks as well as the type of 2622 * each individual track can be found by calling {@link #getTrackInfo()} method. 2623 * @throws IllegalStateException if called in an invalid state. 2624 * 2625 * @see android.media.MediaPlayer#getTrackInfo 2626 */ 2627 public void selectTrack(int index) throws IllegalStateException { 2628 selectOrDeselectTrack(index, true /* select */); 2629 } 2630 2631 /** 2632 * Deselect a track. 
2633 * <p> 2634 * Currently, the track must be a timed text track and no audio or video tracks can be 2635 * deselected. If the timed text track identified by index has not been 2636 * selected before, it throws an exception. 2637 * </p> 2638 * @param index the index of the track to be deselected. The valid range of the index 2639 * is 0..total number of tracks - 1. The total number of tracks as well as the type of 2640 * each individual track can be found by calling {@link #getTrackInfo()} method. 2641 * @throws IllegalStateException if called in an invalid state. 2642 * 2643 * @see android.media.MediaPlayer#getTrackInfo 2644 */ 2645 public void deselectTrack(int index) throws IllegalStateException { 2646 selectOrDeselectTrack(index, false /* select */); 2647 } 2648 2649 private void selectOrDeselectTrack(int index, boolean select) 2650 throws IllegalStateException { 2651 // handle subtitle track through subtitle controller 2652 populateInbandTracks(); 2653 2654 Pair<Integer,SubtitleTrack> p = null; 2655 try { 2656 p = mIndexTrackPairs.get(index); 2657 } catch (ArrayIndexOutOfBoundsException e) { 2658 // ignore bad index 2659 return; 2660 } 2661 2662 SubtitleTrack track = p.second; 2663 if (track == null) { 2664 // inband (de)select 2665 selectOrDeselectInbandTrack(p.first, select); 2666 return; 2667 } 2668 2669 if (mSubtitleController == null) { 2670 return; 2671 } 2672 2673 if (!select) { 2674 // out-of-band deselect 2675 if (mSubtitleController.getSelectedTrack() == track) { 2676 mSubtitleController.selectTrack(null); 2677 } else { 2678 Log.w(TAG, "trying to deselect track that was not selected"); 2679 } 2680 return; 2681 } 2682 2683 // out-of-band select 2684 if (track.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT) { 2685 int ttIndex = getSelectedTrack(TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT); 2686 synchronized (mIndexTrackPairs) { 2687 if (ttIndex >= 0 && ttIndex < mIndexTrackPairs.size()) { 2688 Pair<Integer,SubtitleTrack> p2 = 
mIndexTrackPairs.get(ttIndex); 2689 if (p2.first != null && p2.second == null) { 2690 // deselect inband counterpart 2691 selectOrDeselectInbandTrack(p2.first, false); 2692 } 2693 } 2694 } 2695 } 2696 mSubtitleController.selectTrack(track); 2697 } 2698 2699 private void selectOrDeselectInbandTrack(int index, boolean select) 2700 throws IllegalStateException { 2701 Parcel request = Parcel.obtain(); 2702 Parcel reply = Parcel.obtain(); 2703 try { 2704 request.writeInterfaceToken(IMEDIA_PLAYER); 2705 request.writeInt(select? INVOKE_ID_SELECT_TRACK: INVOKE_ID_DESELECT_TRACK); 2706 request.writeInt(index); 2707 invoke(request, reply); 2708 } finally { 2709 request.recycle(); 2710 reply.recycle(); 2711 } 2712 } 2713 2714 2715 /** 2716 * @param reply Parcel with audio/video duration info for battery 2717 tracking usage 2718 * @return The status code. 2719 * {@hide} 2720 */ 2721 public native static int native_pullBatteryData(Parcel reply); 2722 2723 /** 2724 * Sets the target UDP re-transmit endpoint for the low level player. 2725 * Generally, the address portion of the endpoint is an IP multicast 2726 * address, although a unicast address would be equally valid. When a valid 2727 * retransmit endpoint has been set, the media player will not decode and 2728 * render the media presentation locally. Instead, the player will attempt 2729 * to re-multiplex its media data using the Android@Home RTP profile and 2730 * re-transmit to the target endpoint. Receiver devices (which may be 2731 * either the same as the transmitting device or different devices) may 2732 * instantiate, prepare, and start a receiver player using a setDataSource 2733 * URL of the form... 2734 * 2735 * aahRX://<multicastIP>:<port> 2736 * 2737 * to receive, decode and render the re-transmitted content. 2738 * 2739 * setRetransmitEndpoint may only be called before setDataSource has been 2740 * called; while the player is in the Idle state. 
2741 * 2742 * @param endpoint the address and UDP port of the re-transmission target or 2743 * null if no re-transmission is to be performed. 2744 * @throws IllegalStateException if it is called in an invalid state 2745 * @throws IllegalArgumentException if the retransmit endpoint is supplied, 2746 * but invalid. 2747 * 2748 * {@hide} pending API council 2749 */ 2750 public void setRetransmitEndpoint(InetSocketAddress endpoint) 2751 throws IllegalStateException, IllegalArgumentException 2752 { 2753 String addrString = null; 2754 int port = 0; 2755 2756 if (null != endpoint) { 2757 addrString = endpoint.getAddress().getHostAddress(); 2758 port = endpoint.getPort(); 2759 } 2760 2761 int ret = native_setRetransmitEndpoint(addrString, port); 2762 if (ret != 0) { 2763 throw new IllegalArgumentException("Illegal re-transmit endpoint; native ret " + ret); 2764 } 2765 } 2766 2767 private native final int native_setRetransmitEndpoint(String addrString, int port); 2768 2769 @Override 2770 protected void finalize() { native_finalize(); } 2771 2772 /* Do not change these values without updating their counterparts 2773 * in include/media/mediaplayer.h! 
     */
    // Native event codes delivered via postEventFromNative(); must stay in
    // sync with the native mediaplayer header.
    private static final int MEDIA_NOP = 0; // interface test message
    private static final int MEDIA_PREPARED = 1;
    private static final int MEDIA_PLAYBACK_COMPLETE = 2;
    private static final int MEDIA_BUFFERING_UPDATE = 3;
    private static final int MEDIA_SEEK_COMPLETE = 4;
    private static final int MEDIA_SET_VIDEO_SIZE = 5;
    private static final int MEDIA_STARTED = 6;
    private static final int MEDIA_PAUSED = 7;
    private static final int MEDIA_STOPPED = 8;
    private static final int MEDIA_SKIPPED = 9;
    private static final int MEDIA_TIMED_TEXT = 99;
    private static final int MEDIA_ERROR = 100;
    private static final int MEDIA_INFO = 200;
    private static final int MEDIA_SUBTITLE_DATA = 201;
    private static final int MEDIA_META_DATA = 202;

    // Lazily created in getMediaTimeProvider(); also notified from the event
    // handler on start/pause/stop/seek/buffering transitions below.
    private TimeProvider mTimeProvider;

    /** @hide */
    public MediaTimeProvider getMediaTimeProvider() {
        if (mTimeProvider == null) {
            mTimeProvider = new TimeProvider(this);
        }
        return mTimeProvider;
    }

    /**
     * Dispatches events posted from native code to the application's
     * registered listeners on the looper this handler was created with.
     * NOTE(review): non-static inner class — it reads outer listener fields
     * directly, in addition to the explicit mMediaPlayer reference.
     */
    private class EventHandler extends Handler
    {
        private MediaPlayer mMediaPlayer;

        public EventHandler(MediaPlayer mp, Looper looper) {
            super(looper);
            mMediaPlayer = mp;
        }

        @Override
        public void handleMessage(Message msg) {
            // Drop events that arrive after the native player was released.
            if (mMediaPlayer.mNativeContext == 0) {
                Log.w(TAG, "mediaplayer went away with unhandled events");
                return;
            }
            switch(msg.what) {
            case MEDIA_PREPARED:
                try {
                    scanInternalSubtitleTracks();
                } catch (RuntimeException e) {
                    // send error message instead of crashing;
                    // send error message instead of inlining a call to onError
                    // to avoid code duplication.
                    Message msg2 = obtainMessage(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, MEDIA_ERROR_UNSUPPORTED, null);
                    sendMessage(msg2);
                }
                if (mOnPreparedListener != null)
                    mOnPreparedListener.onPrepared(mMediaPlayer);
                return;

            case MEDIA_PLAYBACK_COMPLETE:
                if (mOnCompletionListener != null)
                    mOnCompletionListener.onCompletion(mMediaPlayer);
                // Playback is over; release any screen/CPU wakelock we hold.
                stayAwake(false);
                return;

            case MEDIA_STOPPED:
                {
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onStopped();
                    }
                }
                break;

            case MEDIA_STARTED:
            case MEDIA_PAUSED:
                {
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onPaused(msg.what == MEDIA_PAUSED);
                    }
                }
                break;

            case MEDIA_BUFFERING_UPDATE:
                if (mOnBufferingUpdateListener != null)
                    mOnBufferingUpdateListener.onBufferingUpdate(mMediaPlayer, msg.arg1);
                return;

            case MEDIA_SEEK_COMPLETE:
                if (mOnSeekCompleteListener != null) {
                    mOnSeekCompleteListener.onSeekComplete(mMediaPlayer);
                }
                // fall through

            case MEDIA_SKIPPED:
                {
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onSeekComplete(mMediaPlayer);
                    }
                }
                return;

            case MEDIA_SET_VIDEO_SIZE:
                if (mOnVideoSizeChangedListener != null) {
                    mOnVideoSizeChangedListener.onVideoSizeChanged(
                            mMediaPlayer, msg.arg1, msg.arg2);
                }
                return;

            case MEDIA_ERROR:
                Log.e(TAG, "Error (" + msg.arg1 + "," + msg.arg2 + ")");
                boolean error_was_handled = false;
                if (mOnErrorListener != null) {
                    error_was_handled = mOnErrorListener.onError(mMediaPlayer, msg.arg1, msg.arg2);
                }
                // Unhandled errors terminate playback from the app's point of
                // view, so completion is reported as well (documented behavior
                // of OnErrorListener.onError()).
                if (mOnCompletionListener != null && !error_was_handled) {
                    mOnCompletionListener.onCompletion(mMediaPlayer);
                }
                stayAwake(false);
                return;

            case MEDIA_INFO:
                switch (msg.arg1) {
                case MEDIA_INFO_VIDEO_TRACK_LAGGING:
                    Log.i(TAG, "Info (" + msg.arg1 + "," + msg.arg2 + ")");
                    break;
                case MEDIA_INFO_METADATA_UPDATE:
                    try {
                        scanInternalSubtitleTracks();
                    } catch (RuntimeException e) {
                        Message msg2 = obtainMessage(
                                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, MEDIA_ERROR_UNSUPPORTED, null);
                        sendMessage(msg2);
                    }
                    // fall through

                case MEDIA_INFO_EXTERNAL_METADATA_UPDATE:
                    // The external-only code is internal; apps see the public
                    // MEDIA_INFO_METADATA_UPDATE value.
                    msg.arg1 = MEDIA_INFO_METADATA_UPDATE;
                    // update default track selection
                    if (mSubtitleController != null) {
                        mSubtitleController.selectDefaultTrack();
                    }
                    break;
                case MEDIA_INFO_BUFFERING_START:
                case MEDIA_INFO_BUFFERING_END:
                    TimeProvider timeProvider = mTimeProvider;
                    if (timeProvider != null) {
                        timeProvider.onBuffering(msg.arg1 == MEDIA_INFO_BUFFERING_START);
                    }
                    break;
                }

                if (mOnInfoListener != null) {
                    mOnInfoListener.onInfo(mMediaPlayer, msg.arg1, msg.arg2);
                }
                // No real default action so far.
                return;
            case MEDIA_TIMED_TEXT:
                if (mOnTimedTextListener == null)
                    return;
                if (msg.obj == null) {
                    // null clears the currently-displayed text.
                    mOnTimedTextListener.onTimedText(mMediaPlayer, null);
                } else {
                    if (msg.obj instanceof Parcel) {
                        Parcel parcel = (Parcel)msg.obj;
                        TimedText text = new TimedText(parcel);
                        parcel.recycle();
                        mOnTimedTextListener.onTimedText(mMediaPlayer, text);
                    }
                }
                return;

            case MEDIA_SUBTITLE_DATA:
                if (mOnSubtitleDataListener == null) {
                    return;
                }
                if (msg.obj instanceof Parcel) {
                    Parcel parcel = (Parcel) msg.obj;
                    SubtitleData data = new SubtitleData(parcel);
                    parcel.recycle();
                    mOnSubtitleDataListener.onSubtitleData(mMediaPlayer, data);
                }
                return;

            case MEDIA_META_DATA:
                if (mOnTimedMetaDataAvailableListener == null) {
                    return;
                }
                if (msg.obj instanceof Parcel) {
                    Parcel parcel = (Parcel) msg.obj;
                    TimedMetaData data = TimedMetaData.createTimedMetaDataFromParcel(parcel);
                    parcel.recycle();
                    mOnTimedMetaDataAvailableListener.onTimedMetaDataAvailable(mMediaPlayer, data);
                }
                return;

            case MEDIA_NOP: // interface test message - ignore
                break;

            default:
                Log.e(TAG, "Unknown message type " + msg.what);
                return;
            }
        }
    }

    /*
     * Called from native code when an interesting event happens. This method
     * just uses the EventHandler system to post the event back to the main app thread.
     * We use a weak reference to the original MediaPlayer object so that the native
     * code is safe from the object disappearing from underneath it. (This is
     * the cookie passed to native_setup().)
2987 */ 2988 private static void postEventFromNative(Object mediaplayer_ref, 2989 int what, int arg1, int arg2, Object obj) 2990 { 2991 MediaPlayer mp = (MediaPlayer)((WeakReference)mediaplayer_ref).get(); 2992 if (mp == null) { 2993 return; 2994 } 2995 2996 if (what == MEDIA_INFO && arg1 == MEDIA_INFO_STARTED_AS_NEXT) { 2997 // this acquires the wakelock if needed, and sets the client side state 2998 mp.start(); 2999 } 3000 if (mp.mEventHandler != null) { 3001 Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj); 3002 mp.mEventHandler.sendMessage(m); 3003 } 3004 } 3005 3006 /** 3007 * Interface definition for a callback to be invoked when the media 3008 * source is ready for playback. 3009 */ 3010 public interface OnPreparedListener 3011 { 3012 /** 3013 * Called when the media file is ready for playback. 3014 * 3015 * @param mp the MediaPlayer that is ready for playback 3016 */ 3017 void onPrepared(MediaPlayer mp); 3018 } 3019 3020 /** 3021 * Register a callback to be invoked when the media source is ready 3022 * for playback. 3023 * 3024 * @param listener the callback that will be run 3025 */ 3026 public void setOnPreparedListener(OnPreparedListener listener) 3027 { 3028 mOnPreparedListener = listener; 3029 } 3030 3031 private OnPreparedListener mOnPreparedListener; 3032 3033 /** 3034 * Interface definition for a callback to be invoked when playback of 3035 * a media source has completed. 3036 */ 3037 public interface OnCompletionListener 3038 { 3039 /** 3040 * Called when the end of a media source is reached during playback. 3041 * 3042 * @param mp the MediaPlayer that reached the end of the file 3043 */ 3044 void onCompletion(MediaPlayer mp); 3045 } 3046 3047 /** 3048 * Register a callback to be invoked when the end of a media source 3049 * has been reached during playback. 
     *
     * @param listener the callback that will be run
     */
    public void setOnCompletionListener(OnCompletionListener listener)
    {
        mOnCompletionListener = listener;
    }

    private OnCompletionListener mOnCompletionListener;

    /**
     * Interface definition of a callback to be invoked indicating buffering
     * status of a media resource being streamed over the network.
     */
    public interface OnBufferingUpdateListener
    {
        /**
         * Called to update status in buffering a media stream received through
         * progressive HTTP download. The received buffering percentage
         * indicates how much of the content has been buffered or played.
         * For example a buffering update of 80 percent when half the content
         * has already been played indicates that the next 30 percent of the
         * content to play has been buffered.
         *
         * @param mp the MediaPlayer the update pertains to
         * @param percent the percentage (0-100) of the content
         *                that has been buffered or played thus far
         */
        void onBufferingUpdate(MediaPlayer mp, int percent);
    }

    /**
     * Register a callback to be invoked when the status of a network
     * stream's buffer has changed.
     *
     * @param listener the callback that will be run.
     */
    public void setOnBufferingUpdateListener(OnBufferingUpdateListener listener)
    {
        mOnBufferingUpdateListener = listener;
    }

    private OnBufferingUpdateListener mOnBufferingUpdateListener;

    /**
     * Interface definition of a callback to be invoked indicating
     * the completion of a seek operation.
     */
    public interface OnSeekCompleteListener
    {
        /**
         * Called to indicate the completion of a seek operation.
         *
         * @param mp the MediaPlayer that issued the seek operation
         */
        public void onSeekComplete(MediaPlayer mp);
    }

    /**
     * Register a callback to be invoked when a seek operation has been
     * completed.
     *
     * @param listener the callback that will be run
     */
    public void setOnSeekCompleteListener(OnSeekCompleteListener listener)
    {
        mOnSeekCompleteListener = listener;
    }

    private OnSeekCompleteListener mOnSeekCompleteListener;

    /**
     * Interface definition of a callback to be invoked when the
     * video size is first known or updated
     */
    public interface OnVideoSizeChangedListener
    {
        /**
         * Called to indicate the video size
         *
         * The video size (width and height) could be 0 if there was no video,
         * no display surface was set, or the value was not determined yet.
         *
         * @param mp the MediaPlayer associated with this callback
         * @param width the width of the video
         * @param height the height of the video
         */
        public void onVideoSizeChanged(MediaPlayer mp, int width, int height);
    }

    /**
     * Register a callback to be invoked when the video size is
     * known or updated.
     *
     * @param listener the callback that will be run
     */
    public void setOnVideoSizeChangedListener(OnVideoSizeChangedListener listener)
    {
        mOnVideoSizeChangedListener = listener;
    }

    private OnVideoSizeChangedListener mOnVideoSizeChangedListener;

    /**
     * Interface definition of a callback to be invoked when a
     * timed text is available for display.
     */
    public interface OnTimedTextListener
    {
        /**
         * Called to indicate an available timed text
         *
         * @param mp the MediaPlayer associated with this callback
         * @param text the timed text sample which contains the text
         *             needed to be displayed and the display format.
         */
        public void onTimedText(MediaPlayer mp, TimedText text);
    }

    /**
     * Register a callback to be invoked when a timed text is available
     * for display.
     *
     * @param listener the callback that will be run
     */
    public void setOnTimedTextListener(OnTimedTextListener listener)
    {
        mOnTimedTextListener = listener;
    }

    private OnTimedTextListener mOnTimedTextListener;

    /**
     * Interface definition of a callback to be invoked when a
     * track has data available.
     *
     * @hide
     */
    public interface OnSubtitleDataListener
    {
        public void onSubtitleData(MediaPlayer mp, SubtitleData data);
    }

    /**
     * Register a callback to be invoked when a track has data available.
     *
     * @param listener the callback that will be run
     *
     * @hide
     */
    public void setOnSubtitleDataListener(OnSubtitleDataListener listener)
    {
        mOnSubtitleDataListener = listener;
    }

    private OnSubtitleDataListener mOnSubtitleDataListener;

    /**
     * Interface definition of a callback to be invoked when a
     * track has timed metadata available.
     *
     * @see MediaPlayer#setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener)
     */
    public interface OnTimedMetaDataAvailableListener
    {
        /**
         * Called to indicate available timed metadata
         * <p>
         * This method will be called as timed metadata is extracted from the media,
         * in the same order as it occurs in the media. The timing of this event is
         * not controlled by the associated timestamp.
         *
         * @param mp the MediaPlayer associated with this callback
         * @param data the timed metadata sample associated with this event
         */
        public void onTimedMetaDataAvailable(MediaPlayer mp, TimedMetaData data);
    }

    /**
     * Register a callback to be invoked when a selected track has timed metadata available.
     * <p>
     * Currently only HTTP live streaming data URI's embedded with timed ID3 tags generates
     * {@link TimedMetaData}.
     *
     * @see MediaPlayer#selectTrack(int)
     * @see MediaPlayer.OnTimedMetaDataAvailableListener
     * @see TimedMetaData
     *
     * @param listener the callback that will be run
     */
    public void setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener listener)
    {
        mOnTimedMetaDataAvailableListener = listener;
    }

    private OnTimedMetaDataAvailableListener mOnTimedMetaDataAvailableListener;

    /* Do not change these values without updating their counterparts
     * in include/media/mediaplayer.h!
     */
    /** Unspecified media player error.
     * @see android.media.MediaPlayer.OnErrorListener
     */
    public static final int MEDIA_ERROR_UNKNOWN = 1;

    /** Media server died. In this case, the application must release the
     * MediaPlayer object and instantiate a new one.
     * @see android.media.MediaPlayer.OnErrorListener
     */
    public static final int MEDIA_ERROR_SERVER_DIED = 100;

    /** The video is streamed and its container is not valid for progressive
     * playback i.e the video's index (e.g moov atom) is not at the start of the
     * file.
     * @see android.media.MediaPlayer.OnErrorListener
     */
    public static final int MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200;

    /** File or network related operation errors. */
    public static final int MEDIA_ERROR_IO = -1004;
    /** Bitstream is not conforming to the related coding standard or file spec. */
    public static final int MEDIA_ERROR_MALFORMED = -1007;
    /** Bitstream is conforming to the related coding standard or file spec, but
     * the media framework does not support the feature. */
    public static final int MEDIA_ERROR_UNSUPPORTED = -1010;
    /** Some operation takes too long to complete, usually more than 3-5 seconds. */
    public static final int MEDIA_ERROR_TIMED_OUT = -110;

    /** Unspecified low-level system error. This value originated from UNKNOWN_ERROR in
     * system/core/include/utils/Errors.h
     * @see android.media.MediaPlayer.OnErrorListener
     * @hide
     */
    public static final int MEDIA_ERROR_SYSTEM = -2147483648;

    /**
     * Interface definition of a callback to be invoked when there
     * has been an error during an asynchronous operation (other errors
     * will throw exceptions at method call time).
     */
    public interface OnErrorListener
    {
        /**
         * Called to indicate an error.
         *
         * @param mp the MediaPlayer the error pertains to
         * @param what the type of error that has occurred:
         * <ul>
         * <li>{@link #MEDIA_ERROR_UNKNOWN}
         * <li>{@link #MEDIA_ERROR_SERVER_DIED}
         * </ul>
         * @param extra an extra code, specific to the error. Typically
         * implementation dependent.
         * <ul>
         * <li>{@link #MEDIA_ERROR_IO}
         * <li>{@link #MEDIA_ERROR_MALFORMED}
         * <li>{@link #MEDIA_ERROR_UNSUPPORTED}
         * <li>{@link #MEDIA_ERROR_TIMED_OUT}
         * <li><code>MEDIA_ERROR_SYSTEM (-2147483648)</code> - low-level system error.
         * </ul>
         * @return True if the method handled the error, false if it didn't.
         * Returning false, or not having an OnErrorListener at all, will
         * cause the OnCompletionListener to be called.
         */
        boolean onError(MediaPlayer mp, int what, int extra);
    }

    /**
     * Register a callback to be invoked when an error has happened
     * during an asynchronous operation.
     *
     * @param listener the callback that will be run
     */
    public void setOnErrorListener(OnErrorListener listener)
    {
        mOnErrorListener = listener;
    }

    private OnErrorListener mOnErrorListener;


    /* Do not change these values without updating their counterparts
     * in include/media/mediaplayer.h!
3333 */ 3334 /** Unspecified media player info. 3335 * @see android.media.MediaPlayer.OnInfoListener 3336 */ 3337 public static final int MEDIA_INFO_UNKNOWN = 1; 3338 3339 /** The player was started because it was used as the next player for another 3340 * player, which just completed playback. 3341 * @see android.media.MediaPlayer.OnInfoListener 3342 * @hide 3343 */ 3344 public static final int MEDIA_INFO_STARTED_AS_NEXT = 2; 3345 3346 /** The player just pushed the very first video frame for rendering. 3347 * @see android.media.MediaPlayer.OnInfoListener 3348 */ 3349 public static final int MEDIA_INFO_VIDEO_RENDERING_START = 3; 3350 3351 /** The video is too complex for the decoder: it can't decode frames fast 3352 * enough. Possibly only the audio plays fine at this stage. 3353 * @see android.media.MediaPlayer.OnInfoListener 3354 */ 3355 public static final int MEDIA_INFO_VIDEO_TRACK_LAGGING = 700; 3356 3357 /** MediaPlayer is temporarily pausing playback internally in order to 3358 * buffer more data. 3359 * @see android.media.MediaPlayer.OnInfoListener 3360 */ 3361 public static final int MEDIA_INFO_BUFFERING_START = 701; 3362 3363 /** MediaPlayer is resuming playback after filling buffers. 3364 * @see android.media.MediaPlayer.OnInfoListener 3365 */ 3366 public static final int MEDIA_INFO_BUFFERING_END = 702; 3367 3368 /** Estimated network bandwidth information (kbps) is available; currently this event fires 3369 * simultaneously as {@link #MEDIA_INFO_BUFFERING_START} and {@link #MEDIA_INFO_BUFFERING_END} 3370 * when playing network files. 3371 * @see android.media.MediaPlayer.OnInfoListener 3372 * @hide 3373 */ 3374 public static final int MEDIA_INFO_NETWORK_BANDWIDTH = 703; 3375 3376 /** Bad interleaving means that a media has been improperly interleaved or 3377 * not interleaved at all, e.g has all the video samples first then all the 3378 * audio ones. Video is playing but a lot of disk seeks may be happening. 
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_BAD_INTERLEAVING = 800;

    /** The media cannot be seeked (e.g live stream)
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_NOT_SEEKABLE = 801;

    /** A new set of metadata is available.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_METADATA_UPDATE = 802;

    /** A new set of external-only metadata is available. Used by
     * JAVA framework to avoid triggering track scanning.
     * @hide
     */
    public static final int MEDIA_INFO_EXTERNAL_METADATA_UPDATE = 803;

    /** Failed to handle timed text track properly.
     * @see android.media.MediaPlayer.OnInfoListener
     *
     * {@hide}
     */
    public static final int MEDIA_INFO_TIMED_TEXT_ERROR = 900;

    /** Subtitle track was not supported by the media framework.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_UNSUPPORTED_SUBTITLE = 901;

    /** Reading the subtitle track takes too long.
     * @see android.media.MediaPlayer.OnInfoListener
     */
    public static final int MEDIA_INFO_SUBTITLE_TIMED_OUT = 902;

    /**
     * Interface definition of a callback to be invoked to communicate some
     * info and/or warning about the media or its playback.
     */
    public interface OnInfoListener
    {
        /**
         * Called to indicate an info or a warning.
         *
         * @param mp the MediaPlayer the info pertains to.
         * @param what the type of info or warning.
         * <ul>
         * <li>{@link #MEDIA_INFO_UNKNOWN}
         * <li>{@link #MEDIA_INFO_VIDEO_TRACK_LAGGING}
         * <li>{@link #MEDIA_INFO_VIDEO_RENDERING_START}
         * <li>{@link #MEDIA_INFO_BUFFERING_START}
         * <li>{@link #MEDIA_INFO_BUFFERING_END}
         * <li><code>MEDIA_INFO_NETWORK_BANDWIDTH (703)</code> -
         *     bandwidth information is available (as <code>extra</code> kbps)
         * <li>{@link #MEDIA_INFO_BAD_INTERLEAVING}
         * <li>{@link #MEDIA_INFO_NOT_SEEKABLE}
         * <li>{@link #MEDIA_INFO_METADATA_UPDATE}
         * <li>{@link #MEDIA_INFO_UNSUPPORTED_SUBTITLE}
         * <li>{@link #MEDIA_INFO_SUBTITLE_TIMED_OUT}
         * </ul>
         * @param extra an extra code, specific to the info. Typically
         * implementation dependent.
         * @return True if the method handled the info, false if it didn't.
         * Returning false, or not having an OnInfoListener at all, will
         * cause the info to be discarded.
         */
        boolean onInfo(MediaPlayer mp, int what, int extra);
    }

    /**
     * Register a callback to be invoked when an info/warning is available.
     *
     * @param listener the callback that will be run
     */
    public void setOnInfoListener(OnInfoListener listener)
    {
        mOnInfoListener = listener;
    }

    // Listener registered via setOnInfoListener(); may be null (no handler).
    private OnInfoListener mOnInfoListener;

    /*
     * Test whether a given video scaling mode is supported.
     */
    private boolean isVideoScalingModeSupported(int mode) {
        return (mode == VIDEO_SCALING_MODE_SCALE_TO_FIT ||
                mode == VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING);
    }

    /** @hide */
    static class TimeProvider implements MediaPlayer.OnSeekCompleteListener,
            MediaTimeProvider {
        private static final String TAG = "MTP";
        // Re-read the player position at least this often (5s in ns) even when
        // no refresh was explicitly requested; see getCurrentTimeUs().
        private static final long MAX_NS_WITHOUT_POSITION_CHECK = 5000000000L;
        // Listeners may be notified up to this many microseconds early.
        private static final long MAX_EARLY_CALLBACK_US = 1000;
        private static final long TIME_ADJUSTMENT_RATE = 2; /* meaning 1/2 */
        private long mLastTimeUs = 0;
        private MediaPlayer mPlayer;
        private boolean mPaused = true;
        private boolean mStopped = true;
        private boolean mBuffering;
        private long mLastReportedTime;
        // Positive offset applied while smoothing a backwards time jump; see
        // getEstimatedTime() / getCurrentTimeUs().
        private long mTimeAdjustment;
        // since we are expecting only a handful listeners per stream, there is
        // no need for log(N) search performance
        private MediaTimeProvider.OnMediaTimeListener mListeners[];
        // mTimes[i] is the next wakeup time (us) requested by mListeners[i],
        // or NO_TIME if none is pending.
        private long mTimes[];
        private long mLastNanoTime;
        private Handler mEventHandler;
        private boolean mRefresh = false;
        private boolean mPausing = false;
        private boolean mSeeking = false;
        private static final int NOTIFY = 1;
        private static final int NOTIFY_TIME = 0;
        private static final int REFRESH_AND_NOTIFY_TIME = 1;
        private static final int NOTIFY_STOP = 2;
        private static final int NOTIFY_SEEK = 3;
        private HandlerThread mHandlerThread;

        /** @hide */
        public boolean DEBUG = false;

        public TimeProvider(MediaPlayer mp) {
            mPlayer = mp;
            try {
                getCurrentTimeUs(true, false);
            } catch (IllegalStateException e) {
                // we assume starting position
                mRefresh = true;
            }

            Looper looper;
            if ((looper = Looper.myLooper()) == null &&
                (looper = Looper.getMainLooper()) == null) {
                // Create our own looper here in case MP was created without one
mHandlerThread = new HandlerThread("MediaPlayerMTPEventThread", 3517 Process.THREAD_PRIORITY_FOREGROUND); 3518 mHandlerThread.start(); 3519 looper = mHandlerThread.getLooper(); 3520 } 3521 mEventHandler = new EventHandler(looper); 3522 3523 mListeners = new MediaTimeProvider.OnMediaTimeListener[0]; 3524 mTimes = new long[0]; 3525 mLastTimeUs = 0; 3526 mTimeAdjustment = 0; 3527 } 3528 3529 private void scheduleNotification(int type, long delayUs) { 3530 // ignore time notifications until seek is handled 3531 if (mSeeking && 3532 (type == NOTIFY_TIME || type == REFRESH_AND_NOTIFY_TIME)) { 3533 return; 3534 } 3535 3536 if (DEBUG) Log.v(TAG, "scheduleNotification " + type + " in " + delayUs); 3537 mEventHandler.removeMessages(NOTIFY); 3538 Message msg = mEventHandler.obtainMessage(NOTIFY, type, 0); 3539 mEventHandler.sendMessageDelayed(msg, (int) (delayUs / 1000)); 3540 } 3541 3542 /** @hide */ 3543 public void close() { 3544 mEventHandler.removeMessages(NOTIFY); 3545 if (mHandlerThread != null) { 3546 mHandlerThread.quitSafely(); 3547 mHandlerThread = null; 3548 } 3549 } 3550 3551 /** @hide */ 3552 protected void finalize() { 3553 if (mHandlerThread != null) { 3554 mHandlerThread.quitSafely(); 3555 } 3556 } 3557 3558 /** @hide */ 3559 public void onPaused(boolean paused) { 3560 synchronized(this) { 3561 if (DEBUG) Log.d(TAG, "onPaused: " + paused); 3562 if (mStopped) { // handle as seek if we were stopped 3563 mStopped = false; 3564 mSeeking = true; 3565 scheduleNotification(NOTIFY_SEEK, 0 /* delay */); 3566 } else { 3567 mPausing = paused; // special handling if player disappeared 3568 mSeeking = false; 3569 scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */); 3570 } 3571 } 3572 } 3573 3574 /** @hide */ 3575 public void onBuffering(boolean buffering) { 3576 synchronized (this) { 3577 if (DEBUG) Log.d(TAG, "onBuffering: " + buffering); 3578 mBuffering = buffering; 3579 scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */); 3580 } 3581 } 3582 3583 /** 
@hide */
        public void onStopped() {
            synchronized(this) {
                if (DEBUG) Log.d(TAG, "onStopped");
                // reset all playback state and tell listeners we stopped
                mPaused = true;
                mStopped = true;
                mSeeking = false;
                mBuffering = false;
                scheduleNotification(NOTIFY_STOP, 0 /* delay */);
            }
        }

        /** @hide */
        @Override
        public void onSeekComplete(MediaPlayer mp) {
            synchronized(this) {
                mStopped = false;
                mSeeking = true;
                scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
            }
        }

        /** A new player took over; treat as a seek if we were waiting for one
         * (mRefresh set by the constructor when the position was unreadable).
         * @hide */
        public void onNewPlayer() {
            if (mRefresh) {
                synchronized(this) {
                    mStopped = false;
                    mSeeking = true;
                    mBuffering = false;
                    scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
                }
            }
        }

        // Deliver onSeek() to all registered listeners at the freshly-read
        // position; falls back to a pause notification if the player is gone.
        private synchronized void notifySeek() {
            mSeeking = false;
            try {
                long timeUs = getCurrentTimeUs(true, false);
                if (DEBUG) Log.d(TAG, "onSeekComplete at " + timeUs);

                for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) {
                    if (listener == null) {
                        // listeners are packed at the front; null marks the end
                        break;
                    }
                    listener.onSeek(timeUs);
                }
            } catch (IllegalStateException e) {
                // we should not be there, but at least signal pause
                if (DEBUG) Log.d(TAG, "onSeekComplete but no player");
                mPausing = true;  // special handling if player disappeared
                notifyTimedEvent(false /* refreshTime */);
            }
        }

        // Deliver onStop() to all registered listeners.
        private synchronized void notifyStop() {
            for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) {
                if (listener == null) {
                    break;
                }
                listener.onStop();
            }
        }

        // Return the slot index for listener, growing the parallel
        // mListeners/mTimes arrays by one if it is not yet registered.
        private int registerListener(MediaTimeProvider.OnMediaTimeListener listener) {
            int i = 0;
            for (; i < mListeners.length; i++) {
                if (mListeners[i] == listener || mListeners[i] == null) {
                    break;
                }
            }

            // new listener
            if (i >= mListeners.length) {
                MediaTimeProvider.OnMediaTimeListener[] newListeners =
                    new MediaTimeProvider.OnMediaTimeListener[i
+ 1];
                long[] newTimes = new long[i + 1];
                System.arraycopy(mListeners, 0, newListeners, 0, mListeners.length);
                System.arraycopy(mTimes, 0, newTimes, 0, mTimes.length);
                mListeners = newListeners;
                mTimes = newTimes;
            }

            if (mListeners[i] == null) {
                mListeners[i] = listener;
                mTimes[i] = MediaTimeProvider.NO_TIME;
            }
            return i;
        }

        // Request a one-shot callback to listener at media time timeUs.
        public void notifyAt(
                long timeUs, MediaTimeProvider.OnMediaTimeListener listener) {
            synchronized(this) {
                if (DEBUG) Log.d(TAG, "notifyAt " + timeUs);
                mTimes[registerListener(listener)] = timeUs;
                scheduleNotification(NOTIFY_TIME, 0 /* delay */);
            }
        }

        // Request an immediate timed-event callback for listener (unless
        // playback is stopped).
        public void scheduleUpdate(MediaTimeProvider.OnMediaTimeListener listener) {
            synchronized(this) {
                if (DEBUG) Log.d(TAG, "scheduleUpdate");
                int i = registerListener(listener);

                if (!mStopped) {
                    mTimes[i] = 0;
                    scheduleNotification(NOTIFY_TIME, 0 /* delay */);
                }
            }
        }

        // Unregister listener, compacting the parallel arrays so that live
        // entries stay packed at the front (null marks the end).
        public void cancelNotifications(
                MediaTimeProvider.OnMediaTimeListener listener) {
            synchronized(this) {
                int i = 0;
                for (; i < mListeners.length; i++) {
                    if (mListeners[i] == listener) {
                        System.arraycopy(mListeners, i + 1,
                                mListeners, i, mListeners.length - i - 1);
                        System.arraycopy(mTimes, i + 1,
                                mTimes, i, mTimes.length - i - 1);
                        mListeners[mListeners.length - 1] = null;
                        mTimes[mTimes.length - 1] = NO_TIME;
                        break;
                    } else if (mListeners[i] == null) {
                        break;
                    }
                }

                scheduleNotification(NOTIFY_TIME, 0 /* delay */);
            }
        }

        // Fire all listener callbacks that are due at the current media time
        // and schedule the next wakeup at the earliest remaining deadline.
        private synchronized void notifyTimedEvent(boolean refreshTime) {
            // figure out next callback
            long nowUs;
            try {
                nowUs = getCurrentTimeUs(refreshTime, true);
            } catch (IllegalStateException e) {
                // assume we paused until new player arrives
                mRefresh = true;
                mPausing = true;  // this ensures that call succeeds
                nowUs = getCurrentTimeUs(refreshTime, true);
            }
            long nextTimeUs = nowUs;

            if (mSeeking) {
                // skip timed-event notifications until seek is complete
                return;
            }

            if (DEBUG) {
                StringBuilder sb = new StringBuilder();
                sb.append("notifyTimedEvent(").append(mLastTimeUs).append(" -> ")
                        .append(nowUs).append(") from {");
                boolean first = true;
                for (long time: mTimes) {
                    if (time == NO_TIME) {
                        continue;
                    }
                    if (!first) sb.append(", ");
                    sb.append(time);
                    first = false;
                }
                sb.append("}");
                Log.d(TAG, sb.toString());
            }

            // Collect due listeners first; callbacks run after scheduling so
            // they cannot mutate the arrays mid-scan.
            Vector<MediaTimeProvider.OnMediaTimeListener> activatedListeners =
                new Vector<MediaTimeProvider.OnMediaTimeListener>();
            for (int ix = 0; ix < mTimes.length; ix++) {
                if (mListeners[ix] == null) {
                    break;
                }
                if (mTimes[ix] <= NO_TIME) {
                    // ignore, unless we were stopped
                } else if (mTimes[ix] <= nowUs + MAX_EARLY_CALLBACK_US) {
                    // due (or nearly due): fire it and clear its deadline
                    activatedListeners.add(mListeners[ix]);
                    if (DEBUG) Log.d(TAG, "removed");
                    mTimes[ix] = NO_TIME;
                } else if (nextTimeUs == nowUs || mTimes[ix] < nextTimeUs) {
                    // track the earliest future deadline
                    nextTimeUs = mTimes[ix];
                }
            }

            if (nextTimeUs > nowUs && !mPaused) {
                // schedule callback at nextTimeUs
                if (DEBUG) Log.d(TAG, "scheduling for " + nextTimeUs + " and " + nowUs);
                scheduleNotification(NOTIFY_TIME, nextTimeUs - nowUs);
            } else {
                mEventHandler.removeMessages(NOTIFY);
                // no more callbacks
            }

            for (MediaTimeProvider.OnMediaTimeListener listener: activatedListeners) {
                listener.onTimedEvent(nowUs);
            }
        }

        // Estimate the current media time (us) from the last read position
        // plus elapsed wall-clock time, applying any pending smoothing
        // adjustment; updates and returns mLastReportedTime.
        private long getEstimatedTime(long nanoTime, boolean monotonic) {
            if (mPaused) {
                mLastReportedTime = mLastTimeUs + mTimeAdjustment;
            } else {
                long timeSinceRead = (nanoTime - mLastNanoTime) / 1000;
                mLastReportedTime = mLastTimeUs + timeSinceRead;
                if (mTimeAdjustment > 0) {
                    long adjustment
=
                        // decay the adjustment at half the rate of elapsed time
                        mTimeAdjustment - timeSinceRead / TIME_ADJUSTMENT_RATE;
                    if (adjustment <= 0) {
                        mTimeAdjustment = 0;
                    } else {
                        mLastReportedTime += adjustment;
                    }
                }
            }
            return mLastReportedTime;
        }

        /**
         * Returns the current media time in microseconds.
         *
         * @param refreshTime true to force a fresh position read from the
         *                    player rather than an estimate
         * @param monotonic   true to keep reported time non-decreasing by
         *                    smoothing small backwards jumps
         * @throws IllegalStateException if the player position cannot be read
         *         and we were not in the middle of pausing
         */
        public long getCurrentTimeUs(boolean refreshTime, boolean monotonic)
                throws IllegalStateException {
            synchronized (this) {
                // we always refresh the time when the paused-state changes, because
                // we expect to have received the pause-change event delayed.
                if (mPaused && !refreshTime) {
                    return mLastReportedTime;
                }

                long nanoTime = System.nanoTime();
                if (refreshTime ||
                        nanoTime >= mLastNanoTime + MAX_NS_WITHOUT_POSITION_CHECK) {
                    try {
                        mLastTimeUs = mPlayer.getCurrentPosition() * 1000L;
                        mPaused = !mPlayer.isPlaying() || mBuffering;
                        if (DEBUG) Log.v(TAG, (mPaused ? "paused" : "playing") + " at " + mLastTimeUs);
                    } catch (IllegalStateException e) {
                        if (mPausing) {
                            // if we were pausing, get last estimated timestamp
                            mPausing = false;
                            getEstimatedTime(nanoTime, monotonic);
                            mPaused = true;
                            if (DEBUG) Log.d(TAG, "illegal state, but pausing: estimating at " + mLastReportedTime);
                            return mLastReportedTime;
                        }
                        // TODO get time when prepared
                        throw e;
                    }
                    mLastNanoTime = nanoTime;
                    if (monotonic && mLastTimeUs < mLastReportedTime) {
                        /* have to adjust time */
                        mTimeAdjustment = mLastReportedTime - mLastTimeUs;
                        if (mTimeAdjustment > 1000000) {
                            // schedule seeked event if time jumped significantly
                            // TODO: do this properly by introducing an exception
                            mStopped = false;
                            mSeeking = true;
                            scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
                        }
                    } else {
                        mTimeAdjustment = 0;
                    }
                }

                return getEstimatedTime(nanoTime, monotonic);
            }
        }

        // Dispatches the single NOTIFY message type to the appropriate
        // TimeProvider notification method on the event looper.
        private class EventHandler extends Handler {
            public EventHandler(Looper looper) {
                super(looper);
}

            @Override
            public void handleMessage(Message msg) {
                if (msg.what == NOTIFY) {
                    // arg1 carries the notification sub-type set by
                    // scheduleNotification()
                    switch (msg.arg1) {
                    case NOTIFY_TIME:
                        notifyTimedEvent(false /* refreshTime */);
                        break;
                    case REFRESH_AND_NOTIFY_TIME:
                        notifyTimedEvent(true /* refreshTime */);
                        break;
                    case NOTIFY_STOP:
                        notifyStop();
                        break;
                    case NOTIFY_SEEK:
                        notifySeek();
                        break;
                    }
                }
            }
        }
    }
}