native-media-jni.c revision ad1ab1d13a9b043202b9d5cdc1d8c4ef66cbbca8
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <jni.h>
#include <pthread.h>
#include <stdio.h>
#include <string.h>
#define LOG_TAG "NativeMedia"
#include <utils/Log.h>

#include "OMXAL/OpenMAXAL.h"
#include "OMXAL/OpenMAXAL_Android.h"

#include <android/native_window_jni.h>

// define as 1 if ANativeWindow * is not supported as a video sink
#define NO_NATIVE_WINDOW 1

// engine interfaces
static XAObjectItf engineObject = NULL;
static XAEngineItf engineEngine;

// output mix interfaces
static XAObjectItf outputMixObject = NULL;

// streaming media player interfaces
static XAObjectItf             playerObj = NULL;
static XAPlayItf               playerPlayItf = NULL;
static XAAndroidBufferQueueItf playerBQItf = NULL;
// number of required interfaces for the MediaPlayer creation
#define NB_MAXAL_INTERFACES 2 // XAAndroidBufferQueueItf and XAPlayItf

// cached surface where the video display happens
#if NO_NATIVE_WINDOW
static jobject theSurfaceOrSurfaceTexture;
#else
static ANativeWindow* theNativeWindow;
#endif

// number of buffers in our buffer queue
#define NB_BUFFERS 16
// we're streaming MPEG-2 transport stream data, so operate on transport stream block size
#define MPEG2_TS_BLOCK_SIZE 188
// size of each cache buffer; determines how much memory we dedicate to caching
#define BUFFER_SIZE (20*MPEG2_TS_BLOCK_SIZE) // 20 is an arbitrary number chosen here

// where we cache in memory the data to play
char dataCache[BUFFER_SIZE * NB_BUFFERS];
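// note: with the values above, the cache holds NB_BUFFERS * BUFFER_SIZE
// = 16 * 20 * 188 = 60,160 bytes of transport stream data in total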
// handle of the file to play
FILE *file;

// AndroidBufferQueueItf callback for the streaming media player
XAresult AndroidBufferQueueCallback(
        XAAndroidBufferQueueItf caller,
        void *pContext,                /* input */
        const void *pBufferData,       /* input */
        XAuint32 dataSize,             /* input */
        XAuint32 dataUsed,             /* input */
        const XAAndroidBufferItem *pItems,/* input */
        XAuint32 itemsLength           /* input */)
{
    // assert(BUFFER_SIZE <= dataSize);
    size_t nbRead = fread((void*)pBufferData, 1, BUFFER_SIZE, file);
    if (nbRead > 0) {
        (*caller)->Enqueue(caller,
                pBufferData /*pData*/,
                nbRead /*dataLength*/,
                NULL /*pMsg*/,
                0 /*msgLength*/);
    } else {
        // signal EOS
        XAAndroidBufferItem msgEos;
        msgEos.itemKey = XA_ANDROID_ITEMKEY_EOS;
        msgEos.itemSize = 0;
        // EOS message has no parameters, so the total size of the message is the size of the key
        //   plus the size of itemSize, both XAuint32
        (*caller)->Enqueue(caller, NULL /*pData*/, 0 /*dataLength*/,
                        &msgEos /*pMsg*/,
                        sizeof(XAuint32)*2 /*msgLength*/);
    }

    return XA_RESULT_SUCCESS;
}
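
// Each time the player has consumed one of the buffers enqueued in
// createStreamingMediaPlayer() below, it invokes the callback above: the callback refills
// that same buffer (which points into dataCache) from the file and re-enqueues it, or
// enqueues an empty buffer carrying an EOS item once the file is exhausted.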


// create the engine and output mix objects
void Java_com_example_nativemedia_NativeMedia_createEngine(JNIEnv* env, jclass clazz)
{
    XAresult res;

    // create engine
    res = xaCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    assert(XA_RESULT_SUCCESS == res);

    // realize the engine
    res = (*engineObject)->Realize(engineObject, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);

    // get the engine interface, which is needed in order to create other objects
    res = (*engineObject)->GetInterface(engineObject, XA_IID_ENGINE, &engineEngine);
    assert(XA_RESULT_SUCCESS == res);

    // create output mix
    res = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL);
    assert(XA_RESULT_SUCCESS == res);

    // realize the output mix
    res = (*outputMixObject)->Realize(outputMixObject, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);
}
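
// Expected call sequence from the Java side (com.example.nativemedia.NativeMedia), judging
// from the entry points in this file: createEngine(), then setSurface() or
// setSurfaceTexture() to provide a video sink, then createStreamingMediaPlayer(),
// setPlayingStreamingMediaPlayer() to pause/resume, and finally shutdown().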


// create streaming media player
jboolean Java_com_example_nativemedia_NativeMedia_createStreamingMediaPlayer(JNIEnv* env,
        jclass clazz, jstring filename)
{
    XAresult res;

    // convert Java string to UTF-8
    const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);
    assert(NULL != utf8);

    // open the file to play
    file = fopen(utf8, "rb");
    if (file == NULL) {
        LOGE("Failed to open %s", utf8);
        // release the UTF-8 copy before bailing out, so we don't leak it
        (*env)->ReleaseStringUTFChars(env, filename, utf8);
        return JNI_FALSE;
    }

    // configure data source
    XADataLocator_AndroidBufferQueue loc_abq = { XA_DATALOCATOR_ANDROIDBUFFERQUEUE, NB_BUFFERS };
    XADataFormat_MIME format_mime = {
            XA_DATAFORMAT_MIME, (XAchar *)"video/mp2ts", XA_CONTAINERTYPE_MPEG_TS };
    XADataSource dataSrc = {&loc_abq, &format_mime};

    // configure audio sink
    XADataLocator_OutputMix loc_outmix = { XA_DATALOCATOR_OUTPUTMIX, outputMixObject };
    XADataSink audioSnk = { &loc_outmix, NULL };

    // configure image video sink
    XADataLocator_NativeDisplay loc_nd = {
            XA_DATALOCATOR_NATIVEDISPLAY,        // locatorType
#if NO_NATIVE_WINDOW
            (void *) theSurfaceOrSurfaceTexture, // jobject
            (void *) env                         // JNIEnv *env
#else
            // later the video sink can be an ANativeWindow created from a Surface or SurfaceTexture
            (void*)theNativeWindow,              // hWindow
            // must be NULL
            NULL                                 // hDisplay
#endif
    };
    XADataSink imageVideoSink = {&loc_nd, NULL};

    // declare interfaces to use
    XAboolean     required[NB_MAXAL_INTERFACES] = {XA_BOOLEAN_TRUE, XA_BOOLEAN_TRUE};
    XAInterfaceID iidArray[NB_MAXAL_INTERFACES] = {XA_IID_PLAY,     XA_IID_ANDROIDBUFFERQUEUE};

    // create media player
    res = (*engineEngine)->CreateMediaPlayer(engineEngine, &playerObj, &dataSrc,
            NULL, &audioSnk, &imageVideoSink, NULL, NULL,
            NB_MAXAL_INTERFACES /*XAuint32 numInterfaces*/,
            iidArray /*const XAInterfaceID *pInterfaceIds*/,
            required /*const XAboolean *pInterfaceRequired*/);
    assert(XA_RESULT_SUCCESS == res);

    // release the Java string and UTF-8
    (*env)->ReleaseStringUTFChars(env, filename, utf8);

    // realize the player
    res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
    assert(XA_RESULT_SUCCESS == res);

    // get the play interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_PLAY, &playerPlayItf);
    assert(XA_RESULT_SUCCESS == res);

    // get the Android buffer queue interface
    res = (*playerObj)->GetInterface(playerObj, XA_IID_ANDROIDBUFFERQUEUE, &playerBQItf);
    assert(XA_RESULT_SUCCESS == res);

    // register the callback from which OpenMAX AL can retrieve the data to play
    res = (*playerBQItf)->RegisterCallback(playerBQItf, AndroidBufferQueueCallback, NULL);
    assert(XA_RESULT_SUCCESS == res);

    /* Fill our cache */
    if (fread(dataCache, 1, BUFFER_SIZE * NB_BUFFERS, file) == 0) {
        LOGE("Error filling cache, exiting\n");
        return JNI_FALSE;
    }
    /* Enqueue the content of our cache before starting to play,
       we don't want to starve the player */
    int i;
    for (i = 0; i < NB_BUFFERS; i++) {
        res = (*playerBQItf)->Enqueue(playerBQItf, dataCache + i*BUFFER_SIZE, BUFFER_SIZE, NULL, 0);
        assert(XA_RESULT_SUCCESS == res);
    }

    // prepare the player: first transition to the paused state
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PAUSED);
    assert(XA_RESULT_SUCCESS == res);

    // start playing
    res = (*playerPlayItf)->SetPlayState(playerPlayItf, XA_PLAYSTATE_PLAYING);
    assert(XA_RESULT_SUCCESS == res);

    return JNI_TRUE;
}
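
#if 0
// A minimal sketch of a uniform error-handling alternative to the assert()/LOGE() mix used
// above, assuming we would rather log and bail out than abort; CHECK_XA is hypothetical and
// not part of the original sample. It only fits functions returning jboolean, e.g.:
//     res = (*playerObj)->Realize(playerObj, XA_BOOLEAN_FALSE);
//     CHECK_XA(res, "Realize(playerObj)");
#define CHECK_XA(res, what)                                   \
    do {                                                      \
        if (XA_RESULT_SUCCESS != (res)) {                     \
            LOGE("%s failed (result %u)", (what), (res));     \
            return JNI_FALSE;                                 \
        }                                                     \
    } while (0)
#endif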


// set the playing state for the streaming media player
void Java_com_example_nativemedia_NativeMedia_setPlayingStreamingMediaPlayer(JNIEnv* env,
        jclass clazz, jboolean isPlaying)
{
    XAresult res;

    // make sure the streaming media player was created
    if (NULL != playerPlayItf) {
        // set the player's state
        res = (*playerPlayItf)->SetPlayState(playerPlayItf, isPlaying ?
            XA_PLAYSTATE_PLAYING : XA_PLAYSTATE_PAUSED);
        assert(XA_RESULT_SUCCESS == res);
    }
}


// shut down the native media system
void Java_com_example_nativemedia_NativeMedia_shutdown(JNIEnv* env, jclass clazz)
{
    // destroy streaming media player object, and invalidate all associated interfaces
    if (playerObj != NULL) {
        (*playerObj)->Destroy(playerObj);
        playerObj = NULL;
        playerPlayItf = NULL;
        playerBQItf = NULL;
    }

    // destroy output mix object, and invalidate all associated interfaces
    if (outputMixObject != NULL) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
    }

    // destroy engine object, and invalidate all associated interfaces
    if (engineObject != NULL) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }

    // close the file, and invalidate the handle so a second shutdown is harmless
    if (file != NULL) {
        fclose(file);
        file = NULL;
    }

#if !NO_NATIVE_WINDOW
    // make sure we don't leak native windows
    if (theNativeWindow != NULL) {
        ANativeWindow_release(theNativeWindow);
        theNativeWindow = NULL;
    }
#endif
}
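
// Note that teardown above mirrors creation in reverse order: the player is destroyed
// before the output mix it renders into, and the output mix before the engine that
// created them, so nothing outlives an object it depends on.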


// set the surface
void Java_com_example_nativemedia_NativeMedia_setSurface(JNIEnv *env, jclass clazz, jobject surface)
{
#if NO_NATIVE_WINDOW
    theSurfaceOrSurfaceTexture = surface;
#else
    // obtain a native window from a Java surface
    theNativeWindow = ANativeWindow_fromSurface(env, surface);
#endif
}


// set the surface texture
void Java_com_example_nativemedia_NativeMedia_setSurfaceTexture(JNIEnv *env, jclass clazz,
        jobject surfaceTexture)
{
#if NO_NATIVE_WINDOW
    theSurfaceOrSurfaceTexture = surfaceTexture;
#else
    // obtain a native window from a Java surface texture
    theNativeWindow = ANativeWindow_fromSurfaceTexture(env, surfaceTexture);
#endif
}
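
// With NO_NATIVE_WINDOW set to 1, the two setters above simply cache the Java object and
// pass it, along with the JNIEnv, to the player via XADataLocator_NativeDisplay; with it
// set to 0 they convert the object to an ANativeWindow instead, which shutdown() releases.
// Caching a jobject across JNI calls would normally require pinning it with NewGlobalRef()
// (and releasing it with DeleteGlobalRef()); this sample stores the reference as-is.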