android_media_MediaSync.cpp revision dfb0e6236b573f1dea1e5182a38aa22a7dc10dc1
/*
 * Copyright 2015, The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "MediaSync-JNI"
#include <utils/Log.h>

#include "android_media_MediaSync.h"

#include "android_media_AudioTrack.h"
#include "android_runtime/AndroidRuntime.h"
#include "android_runtime/android_view_Surface.h"
#include "jni.h"
#include "JNIHelp.h"

#include <gui/Surface.h>

#include <media/AudioTrack.h>
#include <media/stagefright/MediaClock.h>
#include <media/stagefright/MediaSync.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AString.h>

#include <nativehelper/ScopedLocalRef.h>

namespace android {

struct fields_t {
    jfieldID context;
    jfieldID mediaTimestampMediaTimeUsID;
    jfieldID mediaTimestampNanoTimeID;
    jfieldID mediaTimestampClockRateID;
};

static fields_t gFields;

////////////////////////////////////////////////////////////////////////////////

JMediaSync::JMediaSync() {
    mSync = MediaSync::create();
}

JMediaSync::~JMediaSync() {
}

status_t JMediaSync::configureSurface(const sp<IGraphicBufferProducer> &bufferProducer) {
    return mSync->configureSurface(bufferProducer);
}

status_t JMediaSync::configureAudioTrack(
        const sp<AudioTrack> &audioTrack,
        int32_t nativeSampleRateInHz) {
    return mSync->configureAudioTrack(audioTrack, nativeSampleRateInHz);
}

status_t JMediaSync::createInputSurface(
        sp<IGraphicBufferProducer>* bufferProducer) {
    return mSync->createInputSurface(bufferProducer);
}

void JMediaSync::setPlaybackRate(float rate) {
    mSync->setPlaybackRate(rate);
}

sp<const MediaClock> JMediaSync::getMediaClock() {
    return mSync->getMediaClock();
}

status_t JMediaSync::updateQueuedAudioData(
        int sizeInBytes, int64_t presentationTimeUs) {
    return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
}

}  // namespace android

////////////////////////////////////////////////////////////////////////////////

using namespace android;

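// The Java MediaSync object stores a raw pointer to its JMediaSync wrapper in
// the long field mNativeContext (cached as gFields.context in native_init).
// setMediaSync() swaps that pointer while managing a strong reference on
// behalf of the Java object: the incoming wrapper is incStrong'ed and the
// previous one is decStrong'ed, so the native instance stays alive as long as
// a Java object owns it. Passing NULL (native_release / native_finalize)
// drops the last reference.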
static sp<JMediaSync> setMediaSync(JNIEnv *env, jobject thiz, const sp<JMediaSync> &sync) {
    sp<JMediaSync> old = (JMediaSync *)env->GetLongField(thiz, gFields.context);
    if (sync != NULL) {
        sync->incStrong(thiz);
    }
    if (old != NULL) {
        old->decStrong(thiz);
    }

    env->SetLongField(thiz, gFields.context, (jlong)sync.get());

    return old;
}

static sp<JMediaSync> getMediaSync(JNIEnv *env, jobject thiz) {
    return (JMediaSync *)env->GetLongField(thiz, gFields.context);
}

static void android_media_MediaSync_release(JNIEnv *env, jobject thiz) {
    setMediaSync(env, thiz, NULL);
}

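// Maps a native status_t to a Java exception: INVALID_OPERATION becomes
// IllegalStateException and BAD_VALUE becomes IllegalArgumentException.
// Any other status code is silently ignored by this helper and left for the
// caller to handle.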
static void throwExceptionAsNecessary(
        JNIEnv *env, status_t err, const char *msg = NULL) {
    switch (err) {
        case INVALID_OPERATION:
            jniThrowException(env, "java/lang/IllegalStateException", msg);
            break;

        case BAD_VALUE:
            jniThrowException(env, "java/lang/IllegalArgumentException", msg);
            break;

        default:
            break;
    }
}

static void android_media_MediaSync_native_configureSurface(
        JNIEnv *env, jobject thiz, jobject jsurface) {
    ALOGV("android_media_MediaSync_configureSurface");

    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return;
    }

    sp<IGraphicBufferProducer> bufferProducer;
    if (jsurface != NULL) {
        sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
        if (surface != NULL) {
            bufferProducer = surface->getIGraphicBufferProducer();
        } else {
            throwExceptionAsNecessary(env, BAD_VALUE, "The surface has been released");
            return;
        }
    }

    status_t err = sync->configureSurface(bufferProducer);

    if (err == INVALID_OPERATION) {
        throwExceptionAsNecessary(
                env, INVALID_OPERATION, "Surface has already been configured");
    } else if (err != NO_ERROR) {
        AString msg("Failed to connect to surface with error ");
        msg.append(err);
        throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
    }
}

static void android_media_MediaSync_native_configureAudioTrack(
        JNIEnv *env, jobject thiz, jobject jaudioTrack, jint nativeSampleRateInHz) {
    ALOGV("android_media_MediaSync_configureAudioTrack");

    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return;
    }

    sp<AudioTrack> audioTrack;
    if (jaudioTrack != NULL) {
        audioTrack = android_media_AudioTrack_getAudioTrack(env, jaudioTrack);
        if (audioTrack == NULL) {
            throwExceptionAsNecessary(env, BAD_VALUE, "The audio track has been released");
            return;
        }
    }

    status_t err = sync->configureAudioTrack(audioTrack, nativeSampleRateInHz);

    if (err == INVALID_OPERATION) {
        throwExceptionAsNecessary(
                env, INVALID_OPERATION, "Audio track has already been configured");
    } else if (err != NO_ERROR) {
        AString msg("Failed to configure audio track with error ");
        msg.append(err);
        throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
    }
}

static jobject android_media_MediaSync_createInputSurface(
        JNIEnv* env, jobject thiz) {
    ALOGV("android_media_MediaSync_createInputSurface");

    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return NULL;
    }

    // Tell the MediaSync that we want to use a Surface as input.
    sp<IGraphicBufferProducer> bufferProducer;
    status_t err = sync->createInputSurface(&bufferProducer);
    if (err != NO_ERROR) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return NULL;
    }

    // Wrap the IGBP in a Java-language Surface.
    return android_view_Surface_createFromIGraphicBufferProducer(env,
            bufferProducer);
}

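// Reports audio that the app has queued to the AudioTrack (buffer size in
// bytes plus its presentation timestamp) so the native MediaSync can keep its
// media clock anchored to the audio position. The description of intent here
// is inferred from the MediaSync API; the call itself simply forwards to
// MediaSync::updateQueuedAudioData().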
static void android_media_MediaSync_native_updateQueuedAudioData(
        JNIEnv *env, jobject thiz, jint sizeInBytes, jlong presentationTimeUs) {
    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return;
    }

    status_t err = sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
    if (err != NO_ERROR) {
        throwExceptionAsNecessary(env, err);
        return;
    }
}

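// Fills the Java MediaTimestamp from the native MediaClock: mediaTimeUs is the
// media time corresponding to "now", nanoTime is that same "now" converted
// from microseconds to nanoseconds, and clockRate is the current playback
// rate. Returns JNI_FALSE if no clock is available yet or the clock cannot
// report a media time.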
static jboolean android_media_MediaSync_native_getTimestamp(
        JNIEnv *env, jobject thiz, jobject timestamp) {
    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return JNI_FALSE;
    }

    sp<const MediaClock> mediaClock = sync->getMediaClock();
    if (mediaClock == NULL) {
        return JNI_FALSE;
    }

    int64_t nowUs = ALooper::GetNowUs();
    int64_t mediaUs = 0;
    if (mediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
        return JNI_FALSE;
    }

    env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID,
            (jlong)mediaUs);
    env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID,
            (jlong)(nowUs * 1000));
    env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID,
            (jfloat)mediaClock->getPlaybackRate());
    return JNI_TRUE;
}

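// Resolves and caches the JNI field IDs this file needs:
// android.media.MediaSync.mNativeContext plus the mediaTimeUs, nanoTime and
// clockRate fields of android.media.MediaTimestamp. CHECK aborts if the
// framework classes ever drift out of sync with these lookups.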
static void android_media_MediaSync_native_init(JNIEnv *env) {
    ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync"));
    CHECK(clazz.get() != NULL);

    gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J");
    CHECK(gFields.context != NULL);

    clazz.reset(env->FindClass("android/media/MediaTimestamp"));
    CHECK(clazz.get() != NULL);

    gFields.mediaTimestampMediaTimeUsID =
        env->GetFieldID(clazz.get(), "mediaTimeUs", "J");
    CHECK(gFields.mediaTimestampMediaTimeUsID != NULL);

    gFields.mediaTimestampNanoTimeID =
        env->GetFieldID(clazz.get(), "nanoTime", "J");
    CHECK(gFields.mediaTimestampNanoTimeID != NULL);

    gFields.mediaTimestampClockRateID =
        env->GetFieldID(clazz.get(), "clockRate", "F");
    CHECK(gFields.mediaTimestampClockRateID != NULL);
}

static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) {
    sp<JMediaSync> sync = new JMediaSync();

    setMediaSync(env, thiz, sync);
}

static void android_media_MediaSync_native_setPlaybackRate(
        JNIEnv *env, jobject thiz, jfloat rate) {
    sp<JMediaSync> sync = getMediaSync(env, thiz);
    if (sync == NULL) {
        throwExceptionAsNecessary(env, INVALID_OPERATION);
        return;
    }

    sync->setPlaybackRate(rate);
}

static void android_media_MediaSync_native_finalize(JNIEnv *env, jobject thiz) {
    android_media_MediaSync_release(env, thiz);
}

static JNINativeMethod gMethods[] = {
    { "native_configureSurface",
      "(Landroid/view/Surface;)V",
      (void *)android_media_MediaSync_native_configureSurface },

    { "native_configureAudioTrack",
      "(Landroid/media/AudioTrack;I)V",
      (void *)android_media_MediaSync_native_configureAudioTrack },

    { "createInputSurface", "()Landroid/view/Surface;",
      (void *)android_media_MediaSync_createInputSurface },

    { "native_updateQueuedAudioData",
      "(IJ)V",
      (void *)android_media_MediaSync_native_updateQueuedAudioData },

    { "native_getTimestamp",
      "(Landroid/media/MediaTimestamp;)Z",
      (void *)android_media_MediaSync_native_getTimestamp },

    { "native_init", "()V", (void *)android_media_MediaSync_native_init },

    { "native_setup", "()V", (void *)android_media_MediaSync_native_setup },

    { "native_release", "()V", (void *)android_media_MediaSync_release },

    { "native_setPlaybackRate", "(F)V", (void *)android_media_MediaSync_native_setPlaybackRate },

    { "native_finalize", "()V", (void *)android_media_MediaSync_native_finalize },
};

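// The gMethods table above binds these descriptors to native methods that
// android.media.MediaSync is expected to declare. Parameter and return types
// follow directly from the JNI signature strings; parameter names and
// modifiers below are illustrative, not taken from MediaSync.java:
//
//     void    native_configureSurface(Surface surface)
//     void    native_configureAudioTrack(AudioTrack audioTrack, int nativeSampleRateInHz)
//     Surface createInputSurface()
//     void    native_updateQueuedAudioData(int sizeInBytes, long presentationTimeUs)
//     boolean native_getTimestamp(MediaTimestamp timestamp)
//     void    native_init()
//     void    native_setup()
//     void    native_release()
//     void    native_setPlaybackRate(float rate)
//     void    native_finalize()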
int register_android_media_MediaSync(JNIEnv *env) {
    return AndroidRuntime::registerNativeMethods(
                   env, "android/media/MediaSync", gMethods, NELEM(gMethods));
}