native-media-jni.c revision c6853892c94800e72c0bd676d5d2136d48cea76e
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <jni.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#define LOG_NDEBUG 0
#define LOG_TAG "NativeMedia"
#include <utils/Log.h>

#include <OMXAL/OpenMAXAL.h>
#include <OMXAL/OpenMAXAL_Android.h>

#include <android/native_window_jni.h>

// engine interfaces
static XAObjectItf engineObject = NULL;
static XAEngineItf engineEngine;

// output mix interfaces
static XAObjectItf outputMixObject = NULL;

// streaming media player interfaces
static XAObjectItf             playerObj = NULL;
static XAPlayItf               playerPlayItf = NULL;
static XAAndroidBufferQueueItf playerBQItf = NULL;
static XAStreamInformationItf  playerStreamInfoItf = NULL;
static XAVolumeItf             playerVolItf;
// number of required interfaces for the MediaPlayer creation
#define NB_MAXAL_INTERFACES 3 // XAAndroidBufferQueueItf, XAStreamInformationItf and XAPlayItf

// video sink for the player
static ANativeWindow* theNativeWindow;

// number of buffers in our buffer queue
#define NB_BUFFERS 16
// we're streaming MPEG-2 transport stream data, operate on transport stream block size
#define MPEG2_TS_BLOCK_SIZE 188
// determines how much memory we're dedicating to memory caching
#define BUFFER_SIZE (20*MPEG2_TS_BLOCK_SIZE) // 20 is an arbitrary number chosen here

// where we cache in memory the data to play
char dataCache[BUFFER_SIZE * NB_BUFFERS];
// handle of the file to play
FILE *file;
// has the app reached the end of the file
char reachedEof = 0;

// AndroidBufferQueueItf callback for the streaming media player
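// The player invokes this callback each time it has finished consuming a buffer that was
// previously enqueued. We refill that same buffer from the file and enqueue it again; once
// fread() returns no more data, we enqueue an empty buffer carrying an EOS item so the player
// knows the stream has ended.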
XAresult AndroidBufferQueueCallback(
        XAAndroidBufferQueueItf caller,
        void *pCallbackContext,        /* input */
        void *pBufferContext,          /* input */
        void *pBufferData,             /* input */
        XAuint32 dataSize,             /* input */
        XAuint32 dataUsed,             /* input */
        const XAAndroidBufferItem *pItems,/* input */
        XAuint32 itemsLength           /* input */)
{
    // assert(BUFFER_SIZE <= dataSize);
    if (pBufferData == NULL) {
        // this is the case when our buffer with the EOS message has been consumed
        return XA_RESULT_SUCCESS;
    }

#if 0
    // sample code to use the XAVolumeItf
    XAAndroidBufferQueueState state;
    (*caller)->GetState(caller, &state);
    switch (state.index) {
    case 300:
        (*playerVolItf)->SetVolumeLevel(playerVolItf, -600); // -6dB
        LOGV("setting volume to -6dB");
        break;
    case 400:
        (*playerVolItf)->SetVolumeLevel(playerVolItf, -1200); // -12dB
        LOGV("setting volume to -12dB");
        break;
    case 500:
        (*playerVolItf)->SetVolumeLevel(playerVolItf, 0); // full volume
        LOGV("setting volume to 0dB (full volume)");
        break;
    case 600:
        (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_TRUE); // mute
        LOGV("muting player");
        break;
    case 700:
        (*playerVolItf)->SetMute(playerVolItf, XA_BOOLEAN_FALSE); // unmute
        LOGV("unmuting player");
        break;
    case 800:
        (*playerVolItf)->SetStereoPosition(playerVolItf, -1000);
        (*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_TRUE);
        LOGV("pan sound to the left (hard-left)");
        break;
    case 900:
        (*playerVolItf)->EnableStereoPosition(playerVolItf, XA_BOOLEAN_FALSE);
        LOGV("disabling stereo position");
        break;
    default:
        break;
    }
#endif

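    // refill the buffer that was just consumed and hand it back to the player; reads near the
    // end of the file may be short, so pass the actual number of bytes read to Enqueue()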
    size_t nbRead = fread((void*)pBufferData, 1, BUFFER_SIZE, file);
    if ((nbRead > 0) && (NULL != pBufferData)) {
        (*caller)->Enqueue(caller, NULL /*pBufferContext*/,
                pBufferData /*pData*/,
                nbRead /*dataLength*/,
                NULL /*pMsg*/,
                0 /*msgLength*/);
    } else if (!reachedEof) {
        // signal EOS
        XAAndroidBufferItem msgEos;
        msgEos.itemKey = XA_ANDROID_ITEMKEY_EOS;
        msgEos.itemSize = 0;
        // EOS message has no parameters, so the total size of the message is the size of the key
        //   plus the size of itemSize, both XAuint32
        (*caller)->Enqueue(caller, NULL /*pBufferContext*/,
                NULL /*pData*/, 0 /*dataLength*/,
                &msgEos /*pMsg*/,
                sizeof(XAuint32)*2 /*msgLength*/);
        reachedEof = 1;
    }

    return XA_RESULT_SUCCESS;
}


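// Called by OpenMAX AL when properties of a stream change. We register it via
// RegisterStreamChangeCallback below; here we only look at video streams so we can log the
// video dimensions once they are known.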
void StreamChangeCallback (XAStreamInformationItf caller,
        XAuint32 eventId,
        XAuint32 streamIndex,
        void * pEventData,
        void * pContext )
{
    if (XA_STREAMCBEVENT_PROPERTYCHANGE == eventId) {
        LOGD("StreamChangeCallback called for stream %u", streamIndex);

        XAuint32 domain;
        if (XA_RESULT_SUCCESS == (*caller)->QueryStreamType(caller, streamIndex, &domain)) {
            if (XA_DOMAINTYPE_VIDEO == domain) {
                XAVideoStreamInformation videoInfo;
                if (XA_RESULT_SUCCESS == (*caller)->QueryStreamInformation(caller, streamIndex,
                        &videoInfo)) {
                    LOGI("Found video size %u x %u", videoInfo.width, videoInfo.height);
                }
            }
        }
    }
}


// create the engine and output mix objects
void Java_com_example_nativemedia_NativeMedia_createEngine(JNIEnv* env, jclass clazz)
{
    XAresult res;

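    // OpenMAX AL object lifecycle: create the object, Realize() it (XA_BOOLEAN_FALSE means the
    // call blocks until realization completes), then fetch the interfaces we need with
    // GetInterface()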
    // create engine
    res = xaCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    assert(XA_RESULT_SUCCESS == res);

    // realize the engine
    res = (*engineObject)->Realize(engineObject, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);

    // get the engine interface, which is needed in order to create other objects
    res = (*engineObject)->GetInterface(engineObject, XA_IID_ENGINE, &engineEngine);
    assert(XA_RESULT_SUCCESS == res);

    // create output mix
    res = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL);
    assert(XA_RESULT_SUCCESS == res);

    // realize the output mix
    res = (*outputMixObject)->Realize(outputMixObject, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);

}


// create streaming media player
jboolean Java_com_example_nativemedia_NativeMedia_createStreamingMediaPlayer(JNIEnv* env,
        jclass clazz, jstring filename)
{
    XAresult res;

    // convert Java string to UTF-8
    const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);
    assert(NULL != utf8);

    // open the file to play
    file = fopen(utf8, "rb");
    if (file == NULL) {
        LOGE("Failed to open %s", utf8);
        (*env)->ReleaseStringUTFChars(env, filename, utf8);
        return JNI_FALSE;
    }

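    // The data source is an Android buffer queue (NB_BUFFERS slots) with an MPEG-2 TS MIME
    // format: instead of handing the player a URI, the app pushes transport stream data into
    // the queue, and the player pulls it via AndroidBufferQueueCallback.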
    // configure data source
    XADataLocator_AndroidBufferQueue loc_abq = { XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS };
    XADataFormat_MIME format_mime = {
            XA_DATAFORMAT_MIME, (XAchar *)"video/mp2ts", XA_CONTAINERTYPE_MPEG_TS };
    XADataSource dataSrc = {&loc_abq, &format_mime};

    // configure audio sink
    XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject };
    XADataSink audioSnk = { &loc_outmix, NULL };
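    // (audio is routed to the output mix created in createEngine(); the format field of a data
    // sink is ignored for an output mix locator, hence NULL)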

    // configure image video sink
    XADataLocator_NativeDisplay loc_nd = {
            XA_DATALOCATOR_NATIVEDISPLAY,        // locatorType
            // the video sink must be an ANativeWindow created from a Surface or SurfaceTexture
            (void*)theNativeWindow,              // hWindow
            // must be NULL
            NULL                                 // hDisplay
    };
    XADataSink imageVideoSink = {&loc_nd, NULL};

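    // all three interfaces below are marked required, so CreateMediaPlayer fails outright if any
    // of them is unavailable rather than silently omitting it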
    // declare interfaces to use
    XAboolean     required[NB_MAXAL_INTERFACES]
                           = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE,           XA_BOOLEAN_TRUE};
    XAInterfaceID iidArray[NB_MAXAL_INTERFACES]
                           = {XA_IID_PLAY,     XA_IID_ANDROIDBUFFERQUEUE, XA_IID_STREAMINFORMATION};


    // create media player
    res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc,
            NULL, &audioSnk, &imageVideoSink, NULL, NULL,
            NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/,
            iidArray /*const XAInterfaceID *pInterfaceIds*/,
            required /*const XAboolean *pInterfaceRequired*/);
    assert(XA_RESULT_SUCCESS == res);

    // release the Java string and UTF-8
    (*env)->ReleaseStringUTFChars(env, filename, utf8);

    // realize the player
    res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);

    // get the play interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf);
    assert(XA_RESULT_SUCCESS == res);

    // get the stream information interface (for video size)
    res = (*playerObj)->GetInterface(playerObj, XA_IID_STREAMINFORMATION, &playerStreamInfoItf);
    assert(XA_RESULT_SUCCESS == res);

    // get the volume interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_VOLUME, &playerVolItf);
    assert(XA_RESULT_SUCCESS == res);

    // get the Android buffer queue interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_ANDROIDBUFFERQUEUE, &playerBQItf);
    assert(XA_RESULT_SUCCESS == res);

    // register the callback from which OpenMAX AL can retrieve the data to play
    res = (*playerBQItf)->RegisterCallback(playerBQItf, AndroidBufferQueueCallback, NULL);
    assert(XA_RESULT_SUCCESS == res);

    // we want to be notified of the video size once it's found, so we register a callback for that
    res = (*playerStreamInfoItf)->RegisterStreamChangeCallback(playerStreamInfoItf,
            StreamChangeCallback, NULL);
    assert(XA_RESULT_SUCCESS == res);

    /* Fill our cache */
    if (fread(dataCache, 1, BUFFER_SIZE * NB_BUFFERS, file) <= 0) {
        LOGE("Error filling cache, exiting\n");
        return JNI_FALSE;
    }
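    /* dataCache is carved into NB_BUFFERS slices of BUFFER_SIZE bytes; each Enqueue below hands
       the player one slice, and AndroidBufferQueueCallback later refills whichever slice was just
       consumed. Note this assumes the file holds at least BUFFER_SIZE * NB_BUFFERS bytes. */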
    /* Enqueue the content of our cache before starting to play,
       we don't want to starve the player */
    int i;
    for (i=0 ; i < NB_BUFFERS ; i++) {
        res = (*playerBQItf)->Enqueue(playerBQItf, NULL /*pBufferContext*/,
                dataCache + i*BUFFER_SIZE, BUFFER_SIZE, NULL, 0);
        assert(XA_RESULT_SUCCESS == res);
    }

    // prepare the player
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
    assert(XA_RESULT_SUCCESS == res);

    // set the volume (0 millibels = no attenuation, i.e. full volume)
    res = (*playerVolItf)->SetVolumeLevel(playerVolItf, 0);
    assert(XA_RESULT_SUCCESS == res);

    // start the playback
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING);
    assert(XA_RESULT_SUCCESS == res);

    return JNI_TRUE;
}


// set the playing state for the streaming media player
void Java_com_example_nativemedia_NativeMedia_setPlayingStreamingMediaPlayer(JNIEnv* env,
        jclass clazz, jboolean isPlaying)
{
    XAresult res;

    // make sure the streaming media player was created
    if (NULL != playerPlayItf) {

        // set the player's state
        res = (*playerPlayItf)->SetPlayState(playerPlayItf, isPlaying ?
            XA_PLAYSTATE_PLAYING : XA_PLAYSTATE_PAUSED);
        assert(XA_RESULT_SUCCESS == res);

    }

}


// shut down the native media system
void Java_com_example_nativemedia_NativeMedia_shutdown(JNIEnv* env, jclass clazz)
{
    // destroy streaming media player object, and invalidate all associated interfaces
    if (playerObj != NULL) {
        (*playerObj)->Destroy(playerObj);
        playerObj = NULL;
        playerPlayItf = NULL;
        playerBQItf = NULL;
        playerStreamInfoItf = NULL;
        playerVolItf = NULL;
    }

    // destroy output mix object, and invalidate all associated interfaces
    if (outputMixObject != NULL) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
    }

    // destroy engine object, and invalidate all associated interfaces
    if (engineObject != NULL) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }

    // close the file
    if (file != NULL) {
        fclose(file);
        file = NULL;
    }

    // make sure we don't leak native windows
    if (theNativeWindow != NULL) {
        ANativeWindow_release(theNativeWindow);
        theNativeWindow = NULL;
    }
}


// set the surface
void Java_com_example_nativemedia_NativeMedia_setSurface(JNIEnv *env, jclass clazz, jobject surface)
{
    // obtain a native window from a Java surface
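    // (the returned window holds a reference acquired on our behalf; shutdown() releases it
    // with ANativeWindow_release)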
    theNativeWindow = ANativeWindow_fromSurface(env, surface);
}


// set the surface texture
void Java_com_example_nativemedia_NativeMedia_setSurfaceTexture(JNIEnv *env, jclass clazz,
        jobject surfaceTexture)
{
    // obtain a native window from a Java surface texture
    theNativeWindow = ANativeWindow_fromSurfaceTexture(env, surfaceTexture);
}