/* native-media-jni.c — revision 37dc2fccf3f122b79ebd554de209d0a3c94ae161 */
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include <assert.h> 18#include <jni.h> 19#include <pthread.h> 20#include <string.h> 21#define LOG_NDEBUG 0 22#define LOG_TAG "NativeMedia" 23#include <utils/Log.h> 24 25#include "OMXAL/OpenMAXAL.h" 26#include "OMXAL/OpenMAXAL_Android.h" 27 28#include <android/native_window_jni.h> 29 30// define as 1 if ANativeWindow * is not supported as a video sink 31#define NO_NATIVE_WINDOW 1 32 33// engine interfaces 34static XAObjectItf engineObject = NULL; 35static XAEngineItf engineEngine; 36 37// output mix interfaces 38static XAObjectItf outputMixObject = NULL; 39 40// streaming media player interfaces 41static XAObjectItf playerObj = NULL; 42static XAPlayItf playerPlayItf = NULL; 43static XAAndroidBufferQueueItf playerBQItf = NULL; 44static XAStreamInformationItf playerStreamInfoItf = NULL; 45static XAVolumeItf playerVolItf; 46// number of required interfaces for the MediaPlayer creation 47#define NB_MAXAL_INTERFACES 3 // XAAndroidBufferQueueItf, XAStreamInformationItf and XAPlayItf 48 49// cached surface where the video display happens 50#if NO_NATIVE_WINDOW 51static jobject theSurfaceOrSurfaceTexture; 52#else 53static ANativeWindow* theNativeWindow; 54#endif 55 56// number of buffers in our buffer queue 57#define NB_BUFFERS 16 58// we're streaming MPEG-2 transport stream data, operate on transport stream block size 59#define 
MPEG2_TS_BLOCK_SIZE 188 60// determines how much memory we're dedicating to memory caching 61#define BUFFER_SIZE 20*MPEG2_TS_BLOCK_SIZE // 20 is an arbitrary number chosen here 62 63// where we cache in memory the data to play 64char dataCache[BUFFER_SIZE * NB_BUFFERS]; 65// handle of the file to play 66FILE *file; 67// has the app reached the end of the file 68char reachedEof = 0; 69 70// AndroidBufferQueueItf callback for an audio player 71XAresult AndroidBufferQueueCallback( 72 XAAndroidBufferQueueItf caller, 73 void *pCallbackContext, /* input */ 74 void *pBufferContext, /* input */ 75 void *pBufferData, /* input */ 76 XAuint32 dataSize, /* input */ 77 XAuint32 dataUsed, /* input */ 78 const XAAndroidBufferItem *pItems,/* input */ 79 XAuint32 itemsLength /* input */) 80{ 81 // assert(BUFFER_SIZE <= dataSize); 82 if (pBufferData == NULL) { 83 // this is the case when our buffer with the EOS message has been consumed 84 return XA_RESULT_SUCCESS; 85 } 86 87#if 0 88 // sample code to use the XAVolumeItf 89 XAAndroidBufferQueueState state; 90 (*caller)->GetState(caller, &state); 91 switch (state.index) { 92 case 300: 93 (*playerVolItf)->SetVolumeLevel(playerVolItf, -600); // -6dB 94 LOGV("setting volume to -6dB"); 95 break; 96 case 400: 97 (*playerVolItf)->SetVolumeLevel(playerVolItf, -1200); // -12dB 98 LOGV("setting volume to -12dB"); 99 break; 100 case 500: 101 (*playerVolItf)->SetVolumeLevel(playerVolItf, 0); // full volume 102 LOGV("setting volume to 0dB (full volume)"); 103 break; 104 case 600: 105 (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_TRUE); // mute 106 LOGV("muting player"); 107 break; 108 case 700: 109 (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_FALSE); // unmute 110 LOGV("unmuting player"); 111 break; 112 case 800: 113 (*playerVolItf)->SetStereoPosition(playerVolItf, -1000); 114 (*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_TRUE); 115 LOGV("pan sound to the left (hard-left)"); 116 break; 117 case 900: 118 
(*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_FALSE); 119 LOGV("disabling stereo position"); 120 break; 121 default: 122 break; 123 } 124#endif 125 126 size_t nbRead = fread((void*)pBufferData, 1, BUFFER_SIZE, file); 127 if ((nbRead > 0) && (NULL != pBufferData)) { 128 (*caller)->Enqueue(caller, NULL /*pBufferContext*/, 129 pBufferData /*pData*/, 130 nbRead /*dataLength*/, 131 NULL /*pMsg*/, 132 0 /*msgLength*/); 133 } else if (!reachedEof) { 134 // signal EOS 135 XAAndroidBufferItem msgEos; 136 msgEos.itemKey = XA_ANDROID_ITEMKEY_EOS; 137 msgEos.itemSize = 0; 138 // EOS message has no parameters, so the total size of the message is the size of the key 139 // plus the size if itemSize, both XAuint32 140 (*caller)->Enqueue(caller, NULL /*pBufferContext*/, 141 NULL /*pData*/, 0 /*dataLength*/, 142 &msgEos /*pMsg*/, 143 sizeof(XAuint32)*2 /*msgLength*/); 144 reachedEof = 1; 145 } 146 147 return XA_RESULT_SUCCESS; 148} 149 150 151void StreamChangeCallback (XAStreamInformationItf caller, 152 XAuint32 eventId, 153 XAuint32 streamIndex, 154 void * pEventData, 155 void * pContext ) 156{ 157 if (XA_STREAMCBEVENT_PROPERTYCHANGE == eventId) { 158 LOGD("StreamChangeCallback called for stream %lu", streamIndex); 159 160 XAuint32 domain; 161 if (XA_RESULT_SUCCESS == (*caller)->QueryStreamType(caller, streamIndex, &domain)) { 162 if (XA_DOMAINTYPE_VIDEO == domain) { 163 XAVideoStreamInformation videoInfo; 164 if (XA_RESULT_SUCCESS == (*caller)->QueryStreamInformation(caller, streamIndex, 165 &videoInfo)) { 166 LOGI("Found video size %lu x %lu", videoInfo.width, videoInfo.height); 167 } 168 } 169 } 170 } 171} 172 173 174// create the engine and output mix objects 175void Java_com_example_nativemedia_NativeMedia_createEngine(JNIEnv* env, jclass clazz) 176{ 177 XAresult res; 178 179 // create engine 180 res = xaCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL); 181 assert(XA_RESULT_SUCCESS == res); 182 183 // realize the engine 184 res = 
(*engineObject)->Realize(engineObject, XA_BOOLEAN_FALSE); 185 assert(XA_RESULT_SUCCESS == res); 186 187 // get the engine interface, which is needed in order to create other objects 188 res = (*engineObject)->GetInterface(engineObject, XA_IID_ENGINE, &engineEngine); 189 assert(XA_RESULT_SUCCESS == res); 190 191 // create output mix 192 res = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL); 193 assert(XA_RESULT_SUCCESS == res); 194 195 // realize the output mix 196 res = (*outputMixObject)->Realize(outputMixObject, XA_BOOLEAN_FALSE); 197 assert(XA_RESULT_SUCCESS == res); 198 199} 200 201 202// create streaming media player 203jboolean Java_com_example_nativemedia_NativeMedia_createStreamingMediaPlayer(JNIEnv* env, 204 jclass clazz, jstring filename) 205{ 206 XAresult res; 207 208 // convert Java string to UTF-8 209 const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL); 210 assert(NULL != utf8); 211 212 // open the file to play 213 file = fopen(utf8, "rb"); 214 if (file == NULL) { 215 LOGE("Failed to open %s", utf8); 216 return JNI_FALSE; 217 } 218 219 // configure data source 220 XADataLocator_AndroidBufferQueue loc_abq = { XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS }; 221 XADataFormat_MIME format_mime = { 222 XA_DATAFORMAT_MIME, (XAchar *)"video/mp2ts", XA_CONTAINERTYPE_MPEG_TS }; 223 XADataSource dataSrc = {&loc_abq, &format_mime}; 224 225 // configure audio sink 226 XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject }; 227 XADataSink audioSnk = { &loc_outmix, NULL }; 228 229 // configure image video sink 230 XADataLocator_NativeDisplay loc_nd = { 231 XA_DATALOCATOR_NATIVEDISPLAY, // locatorType 232#if NO_NATIVE_WINDOW 233 (void *) theSurfaceOrSurfaceTexture, // jobject 234 (void *) env // JNIEnv *env 235#else 236 // later the video sink can be an ANativeWindow created from a Surface or SurfaceTexture 237 (void*)theNativeWindow, // hWindow 238 // must be NULL 239 NULL // hDisplay 
240#endif 241 }; 242 XADataSink imageVideoSink = {&loc_nd, NULL}; 243 244 // declare interfaces to use 245 XAboolean required[NB_MAXAL_INTERFACES] 246 = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE}; 247 XAInterfaceID iidArray[NB_MAXAL_INTERFACES] 248 = {XA_IID_PLAY, XA_IID_ANDROIDBUFFERQUEUE, XA_IID_STREAMINFORMATION}; 249 250 251 // create media player 252 res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc, 253 NULL, &audioSnk, &imageVideoSink, NULL, NULL, 254 NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/, 255 iidArray /*const XAInterfaceID *pInterfaceIds*/, 256 required /*const XAboolean *pInterfaceRequired*/); 257 assert(XA_RESULT_SUCCESS == res); 258 259 // release the Java string and UTF-8 260 (*env)->ReleaseStringUTFChars(env, filename, utf8); 261 262 // realize the player 263 res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE); 264 assert(XA_RESULT_SUCCESS == res); 265 266 // get the play interface 267 res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf); 268 assert(XA_RESULT_SUCCESS == res); 269 270 // get the stream information interface (for video size) 271 res = (*playerObj)->GetInterface(playerObj, XA_IID_STREAMINFORMATION, &playerStreamInfoItf); 272 assert(XA_RESULT_SUCCESS == res); 273 274 // get the volume interface 275 res = (*playerObj)->GetInterface(playerObj, XA_IID_VOLUME, &playerVolItf); 276 assert(XA_RESULT_SUCCESS == res); 277 278 // get the Android buffer queue interface 279 res = (*playerObj)->GetInterface(playerObj, XA_IID_ANDROIDBUFFERQUEUE, &playerBQItf); 280 assert(XA_RESULT_SUCCESS == res); 281 282 // register the callback from which OpenMAX AL can retrieve the data to play 283 res = (*playerBQItf)->RegisterCallback(playerBQItf, AndroidBufferQueueCallback, NULL); 284 assert(XA_RESULT_SUCCESS == res); 285 286 // we want to be notified of the video size once it's found, so we register a callback for that 287 res = 
(*playerStreamInfoItf)->RegisterStreamChangeCallback(playerStreamInfoItf, 288 StreamChangeCallback, NULL); 289 290 /* Fill our cache */ 291 if (fread(dataCache, 1, BUFFER_SIZE * NB_BUFFERS, file) <= 0) { 292 LOGE("Error filling cache, exiting\n"); 293 return JNI_FALSE; 294 } 295 /* Enqueue the content of our cache before starting to play, 296 we don't want to starve the player */ 297 int i; 298 for (i=0 ; i < NB_BUFFERS ; i++) { 299 res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/, 300 dataCache + i*BUFFER_SIZE, BUFFER_SIZE, NULL, 0); 301 assert(XA_RESULT_SUCCESS == res); 302 } 303 304 // prepare the player 305 res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED); 306 assert(XA_RESULT_SUCCESS == res); 307 308 // set the volume 309 res = (*playerVolItf)->SetVolumeLevel(playerVolItf, 0);//-300); 310 assert(XA_RESULT_SUCCESS == res); 311 312 // start the playback 313 res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING); 314 assert(XA_RESULT_SUCCESS == res); 315 316 return JNI_TRUE; 317} 318 319 320// set the playing state for the streaming media player 321void Java_com_example_nativemedia_NativeMedia_setPlayingStreamingMediaPlayer(JNIEnv* env, 322 jclass clazz, jboolean isPlaying) 323{ 324 XAresult res; 325 326 // make sure the streaming media player was created 327 if (NULL != playerPlayItf) { 328 329 // set the player's state 330 res = (*playerPlayItf)->SetPlayState(playerPlayItf, isPlaying ? 
331 XA_PLAYSTATE_PLAYING : XA_PLAYSTATE_PAUSED); 332 assert(XA_RESULT_SUCCESS == res); 333 334 } 335 336} 337 338 339// shut down the native media system 340void Java_com_example_nativemedia_NativeMedia_shutdown(JNIEnv* env, jclass clazz) 341{ 342 // destroy streaming media player object, and invalidate all associated interfaces 343 if (playerObj != NULL) { 344 (*playerObj)->Destroy(playerObj); 345 playerObj = NULL; 346 playerPlayItf = NULL; 347 playerBQItf = NULL; 348 } 349 350 // destroy output mix object, and invalidate all associated interfaces 351 if (outputMixObject != NULL) { 352 (*outputMixObject)->Destroy(outputMixObject); 353 outputMixObject = NULL; 354 } 355 356 // destroy engine object, and invalidate all associated interfaces 357 if (engineObject != NULL) { 358 (*engineObject)->Destroy(engineObject); 359 engineObject = NULL; 360 engineEngine = NULL; 361 } 362 363 // close the file 364 if (file != NULL) { 365 fclose(file); 366 file = NULL; 367 } 368 369#if !NO_NATIVE_WINDOW 370 // make sure we don't leak native windows 371 if (theNativeWindow != NULL) { 372 ANativeWindow_release(theNativeWindow); 373 theNativeWindow = NULL; 374 } 375#endif 376} 377 378 379// set the surface 380void Java_com_example_nativemedia_NativeMedia_setSurface(JNIEnv *env, jclass clazz, jobject surface) 381{ 382#if NO_NATIVE_WINDOW 383 theSurfaceOrSurfaceTexture = surface; 384#else 385 // obtain a native window from a Java surface 386 theNativeWindow = ANativeWindow_fromSurface(env, surface); 387#endif 388} 389 390 391// set the surface texture 392void Java_com_example_nativemedia_NativeMedia_setSurfaceTexture(JNIEnv *env, jclass clazz, 393 jobject surfaceTexture) 394{ 395#if NO_NATIVE_WINDOW 396 theSurfaceOrSurfaceTexture = surfaceTexture; 397#else 398 // obtain a native window from a Java surface texture 399 theNativeWindow = ANativeWindow_fromSurfaceTexture(env, surfaceTexture); 400#endif 401} 402