// android_media_MediaSync.cpp revision 25b802d47249702b9e5d175b3e7144934b67553d
1/* 2 * Copyright 2015, The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "MediaSync-JNI" 19#include <utils/Log.h> 20 21#include "android_media_MediaSync.h" 22 23#include "android_media_AudioTrack.h" 24#include "android_media_SyncSettings.h" 25#include "android_runtime/AndroidRuntime.h" 26#include "android_runtime/android_view_Surface.h" 27#include "jni.h" 28#include "JNIHelp.h" 29 30#include <gui/Surface.h> 31 32#include <media/AudioTrack.h> 33#include <media/stagefright/MediaClock.h> 34#include <media/stagefright/MediaSync.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AString.h> 37 38#include <nativehelper/ScopedLocalRef.h> 39 40namespace android { 41 42struct fields_t { 43 jfieldID context; 44 jfieldID mediaTimestampMediaTimeUsID; 45 jfieldID mediaTimestampNanoTimeID; 46 jfieldID mediaTimestampClockRateID; 47}; 48 49static fields_t gFields; 50static SyncSettings::fields_t gSyncSettingsFields; 51 52//////////////////////////////////////////////////////////////////////////////// 53 54JMediaSync::JMediaSync() { 55 mSync = MediaSync::create(); 56} 57 58JMediaSync::~JMediaSync() { 59} 60 61status_t JMediaSync::configureSurface(const sp<IGraphicBufferProducer> &bufferProducer) { 62 return mSync->configureSurface(bufferProducer); 63} 64 65status_t JMediaSync::configureAudioTrack( 66 const sp<AudioTrack> &audioTrack, 67 int32_t 
nativeSampleRateInHz) { 68 return mSync->configureAudioTrack(audioTrack, nativeSampleRateInHz); 69} 70 71status_t JMediaSync::createInputSurface( 72 sp<IGraphicBufferProducer>* bufferProducer) { 73 return mSync->createInputSurface(bufferProducer); 74} 75 76status_t JMediaSync::setPlaybackRate(float rate) { 77 return mSync->setPlaybackRate(rate); 78} 79 80sp<const MediaClock> JMediaSync::getMediaClock() { 81 return mSync->getMediaClock(); 82} 83 84status_t JMediaSync::updateQueuedAudioData( 85 int sizeInBytes, int64_t presentationTimeUs) { 86 return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs); 87} 88 89status_t JMediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) { 90 return mSync->getPlayTimeForPendingAudioFrames(outTimeUs); 91} 92 93} // namespace android 94 95//////////////////////////////////////////////////////////////////////////////// 96 97using namespace android; 98 99static sp<JMediaSync> setMediaSync(JNIEnv *env, jobject thiz, const sp<JMediaSync> &sync) { 100 sp<JMediaSync> old = (JMediaSync *)env->GetLongField(thiz, gFields.context); 101 if (sync != NULL) { 102 sync->incStrong(thiz); 103 } 104 if (old != NULL) { 105 old->decStrong(thiz); 106 } 107 108 env->SetLongField(thiz, gFields.context, (jlong)sync.get()); 109 110 return old; 111} 112 113static sp<JMediaSync> getMediaSync(JNIEnv *env, jobject thiz) { 114 return (JMediaSync *)env->GetLongField(thiz, gFields.context); 115} 116 117static void android_media_MediaSync_release(JNIEnv *env, jobject thiz) { 118 setMediaSync(env, thiz, NULL); 119} 120 121static void throwExceptionAsNecessary( 122 JNIEnv *env, status_t err, const char *msg = NULL) { 123 switch (err) { 124 case NO_ERROR: 125 break; 126 127 case BAD_VALUE: 128 jniThrowException(env, "java/lang/IllegalArgumentException", msg); 129 break; 130 131 case NO_INIT: 132 case INVALID_OPERATION: 133 default: 134 if (err > 0) { 135 break; 136 } 137 AString msgWithErrorCode(msg); 138 msgWithErrorCode.append(" error:"); 139 
msgWithErrorCode.append(err); 140 jniThrowException(env, "java/lang/IllegalStateException", msgWithErrorCode.c_str()); 141 break; 142 } 143} 144 145static void android_media_MediaSync_native_configureSurface( 146 JNIEnv *env, jobject thiz, jobject jsurface) { 147 ALOGV("android_media_MediaSync_configureSurface"); 148 149 sp<JMediaSync> sync = getMediaSync(env, thiz); 150 if (sync == NULL) { 151 throwExceptionAsNecessary(env, INVALID_OPERATION); 152 return; 153 } 154 155 sp<IGraphicBufferProducer> bufferProducer; 156 if (jsurface != NULL) { 157 sp<Surface> surface(android_view_Surface_getSurface(env, jsurface)); 158 if (surface != NULL) { 159 bufferProducer = surface->getIGraphicBufferProducer(); 160 } else { 161 throwExceptionAsNecessary(env, BAD_VALUE, "The surface has been released"); 162 return; 163 } 164 } 165 166 status_t err = sync->configureSurface(bufferProducer); 167 168 if (err == INVALID_OPERATION) { 169 throwExceptionAsNecessary( 170 env, INVALID_OPERATION, "Surface has already been configured"); 171 } if (err != NO_ERROR) { 172 AString msg("Failed to connect to surface with error "); 173 msg.append(err); 174 throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str()); 175 } 176} 177 178static void android_media_MediaSync_native_configureAudioTrack( 179 JNIEnv *env, jobject thiz, jobject jaudioTrack, jint nativeSampleRateInHz) { 180 ALOGV("android_media_MediaSync_configureAudioTrack"); 181 182 sp<JMediaSync> sync = getMediaSync(env, thiz); 183 if (sync == NULL) { 184 throwExceptionAsNecessary(env, INVALID_OPERATION); 185 return; 186 } 187 188 sp<AudioTrack> audioTrack; 189 if (jaudioTrack != NULL) { 190 audioTrack = android_media_AudioTrack_getAudioTrack(env, jaudioTrack); 191 if (audioTrack == NULL) { 192 throwExceptionAsNecessary(env, BAD_VALUE, "The audio track has been released"); 193 return; 194 } 195 } 196 197 status_t err = sync->configureAudioTrack(audioTrack, nativeSampleRateInHz); 198 199 if (err == INVALID_OPERATION) { 200 
throwExceptionAsNecessary( 201 env, INVALID_OPERATION, "Audio track has already been configured"); 202 } if (err != NO_ERROR) { 203 AString msg("Failed to configure audio track with error "); 204 msg.append(err); 205 throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str()); 206 } 207} 208 209static jobject android_media_MediaSync_createInputSurface( 210 JNIEnv* env, jobject thiz) { 211 ALOGV("android_media_MediaSync_createInputSurface"); 212 213 sp<JMediaSync> sync = getMediaSync(env, thiz); 214 if (sync == NULL) { 215 throwExceptionAsNecessary(env, INVALID_OPERATION); 216 return NULL; 217 } 218 219 // Tell the MediaSync that we want to use a Surface as input. 220 sp<IGraphicBufferProducer> bufferProducer; 221 status_t err = sync->createInputSurface(&bufferProducer); 222 if (err != NO_ERROR) { 223 throwExceptionAsNecessary(env, INVALID_OPERATION); 224 return NULL; 225 } 226 227 // Wrap the IGBP in a Java-language Surface. 228 return android_view_Surface_createFromIGraphicBufferProducer(env, 229 bufferProducer); 230} 231 232static void android_media_MediaSync_native_updateQueuedAudioData( 233 JNIEnv *env, jobject thiz, jint sizeInBytes, jlong presentationTimeUs) { 234 sp<JMediaSync> sync = getMediaSync(env, thiz); 235 if (sync == NULL) { 236 throwExceptionAsNecessary(env, INVALID_OPERATION); 237 return; 238 } 239 240 status_t err = sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs); 241 if (err != NO_ERROR) { 242 throwExceptionAsNecessary(env, err); 243 return; 244 } 245} 246 247static jboolean android_media_MediaSync_native_getTimestamp( 248 JNIEnv *env, jobject thiz, jobject timestamp) { 249 sp<JMediaSync> sync = getMediaSync(env, thiz); 250 if (sync == NULL) { 251 throwExceptionAsNecessary(env, INVALID_OPERATION); 252 return JNI_FALSE; 253 } 254 255 sp<const MediaClock> mediaClock = sync->getMediaClock(); 256 if (mediaClock == NULL) { 257 return JNI_FALSE; 258 } 259 260 int64_t nowUs = ALooper::GetNowUs(); 261 int64_t mediaUs = 0; 262 if 
(mediaClock->getMediaTime(nowUs, &mediaUs) != OK) { 263 return JNI_FALSE; 264 } 265 266 env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID, 267 (jlong)mediaUs); 268 env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID, 269 (jlong)(nowUs * 1000)); 270 env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID, 271 (jfloat)mediaClock->getPlaybackRate()); 272 return JNI_TRUE; 273} 274 275static jlong android_media_MediaSync_native_getPlayTimeForPendingAudioFrames( 276 JNIEnv *env, jobject thiz) { 277 sp<JMediaSync> sync = getMediaSync(env, thiz); 278 if (sync == NULL) { 279 throwExceptionAsNecessary(env, INVALID_OPERATION); 280 } 281 282 int64_t playTimeUs = 0; 283 status_t err = sync->getPlayTimeForPendingAudioFrames(&playTimeUs); 284 if (err != NO_ERROR) { 285 throwExceptionAsNecessary(env, err); 286 } 287 return (jlong)playTimeUs; 288} 289 290static void 291android_media_MediaSync_setSyncSettings(JNIEnv *env, jobject thiz, jobject settings) 292{ 293 sp<JMediaSync> sync = getMediaSync(env, thiz); 294 if (sync == NULL) { 295 throwExceptionAsNecessary(env, INVALID_OPERATION); 296 return; 297 } 298 299 SyncSettings scs; 300 scs.fillFromJobject(env, gSyncSettingsFields, settings); 301 ALOGV("setSyncSettings: %d:%d %d:%d %d:%f %d:%f", 302 scs.syncSourceSet, scs.syncSource, 303 scs.audioAdjustModeSet, scs.audioAdjustMode, 304 scs.toleranceSet, scs.tolerance, 305 scs.frameRateSet, scs.frameRate); 306 307 // TODO: pass sync settings to mediasync when it supports it 308} 309 310static jobject 311android_media_MediaSync_getSyncSettings(JNIEnv *env, jobject thiz) 312{ 313 sp<JMediaSync> sync = getMediaSync(env, thiz); 314 if (sync == NULL) { 315 throwExceptionAsNecessary(env, INVALID_OPERATION); 316 return NULL; 317 } 318 319 SyncSettings scs; 320 scs.syncSource = 0; // SYNC_SOURCE_DEFAULT 321 scs.audioAdjustMode = 0; // AUDIO_ADJUST_MODE_DEFAULT 322 scs.tolerance = 0.f; 323 scs.frameRate = 0.f; 324 325 // TODO: get this from mediaplayer when it 
supports it 326 // process_media_player_call( 327 // env, thiz, mp->getSyncSettings(&scs), NULL, NULL); 328 ALOGV("getSyncSettings: %d %d %f %f", 329 scs.syncSource, scs.audioAdjustMode, scs.tolerance, scs.frameRate); 330 331 scs.syncSourceSet = true; 332 scs.audioAdjustModeSet = true; 333 scs.toleranceSet = true; 334 scs.frameRateSet = false; 335 336 return scs.asJobject(env, gSyncSettingsFields); 337} 338 339static void android_media_MediaSync_native_init(JNIEnv *env) { 340 ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync")); 341 CHECK(clazz.get() != NULL); 342 343 gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J"); 344 CHECK(gFields.context != NULL); 345 346 clazz.reset(env->FindClass("android/media/MediaTimestamp")); 347 CHECK(clazz.get() != NULL); 348 349 gFields.mediaTimestampMediaTimeUsID = 350 env->GetFieldID(clazz.get(), "mediaTimeUs", "J"); 351 CHECK(gFields.mediaTimestampMediaTimeUsID != NULL); 352 353 gFields.mediaTimestampNanoTimeID = 354 env->GetFieldID(clazz.get(), "nanoTime", "J"); 355 CHECK(gFields.mediaTimestampNanoTimeID != NULL); 356 357 gFields.mediaTimestampClockRateID = 358 env->GetFieldID(clazz.get(), "clockRate", "F"); 359 CHECK(gFields.mediaTimestampClockRateID != NULL); 360 361 gSyncSettingsFields.init(env); 362} 363 364static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) { 365 sp<JMediaSync> sync = new JMediaSync(); 366 367 setMediaSync(env, thiz, sync); 368} 369 370static void android_media_MediaSync_native_setPlaybackRate( 371 JNIEnv *env, jobject thiz, jfloat rate) { 372 sp<JMediaSync> sync = getMediaSync(env, thiz); 373 if (sync == NULL) { 374 throwExceptionAsNecessary(env, INVALID_OPERATION); 375 return; 376 } 377 378 status_t err = sync->setPlaybackRate(rate); 379 if (err != NO_ERROR) { 380 throwExceptionAsNecessary(env, err); 381 return; 382 } 383} 384 385static void android_media_MediaSync_native_finalize(JNIEnv *env, jobject thiz) { 386 
android_media_MediaSync_release(env, thiz); 387} 388 389static JNINativeMethod gMethods[] = { 390 { "native_configureSurface", 391 "(Landroid/view/Surface;)V", 392 (void *)android_media_MediaSync_native_configureSurface }, 393 394 { "native_configureAudioTrack", 395 "(Landroid/media/AudioTrack;I)V", 396 (void *)android_media_MediaSync_native_configureAudioTrack }, 397 398 { "createInputSurface", "()Landroid/view/Surface;", 399 (void *)android_media_MediaSync_createInputSurface }, 400 401 { "native_updateQueuedAudioData", 402 "(IJ)V", 403 (void *)android_media_MediaSync_native_updateQueuedAudioData }, 404 405 { "native_getTimestamp", 406 "(Landroid/media/MediaTimestamp;)Z", 407 (void *)android_media_MediaSync_native_getTimestamp }, 408 409 { "native_getPlayTimeForPendingAudioFrames", 410 "()J", 411 (void *)android_media_MediaSync_native_getPlayTimeForPendingAudioFrames }, 412 413 { "native_init", "()V", (void *)android_media_MediaSync_native_init }, 414 415 { "native_setup", "()V", (void *)android_media_MediaSync_native_setup }, 416 417 { "native_release", "()V", (void *)android_media_MediaSync_release }, 418 419 { "native_setPlaybackRate", "(F)V", (void *)android_media_MediaSync_native_setPlaybackRate }, 420 421 { "setSyncSettings", "(Landroid/media/SyncSettings;)V", (void *)android_media_MediaSync_setSyncSettings}, 422 423 { "getSyncSettings", "()Landroid/media/SyncSettings;", (void *)android_media_MediaSync_getSyncSettings}, 424 425 { "native_finalize", "()V", (void *)android_media_MediaSync_native_finalize }, 426}; 427 428int register_android_media_MediaSync(JNIEnv *env) { 429 return AndroidRuntime::registerNativeMethods( 430 env, "android/media/MediaSync", gMethods, NELEM(gMethods)); 431} 432