// android_media_AudioTrack.cpp revision a1d80e3b1d210c60c6881a55ed39a4077ff66080
1/* 2 * Copyright (C) 2008 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16//#define LOG_NDEBUG 0 17 18#define LOG_TAG "AudioTrack-JNI" 19 20#include <JNIHelp.h> 21#include <JniConstants.h> 22#include <android_runtime/AndroidRuntime.h> 23 24#include "ScopedBytes.h" 25 26#include <utils/Log.h> 27#include <media/AudioSystem.h> 28#include <media/AudioTrack.h> 29#include <audio_utils/primitives.h> 30 31#include <binder/MemoryHeapBase.h> 32#include <binder/MemoryBase.h> 33 34#include "android_media_AudioFormat.h" 35#include "android_media_AudioErrors.h" 36 37// ---------------------------------------------------------------------------- 38 39using namespace android; 40 41// ---------------------------------------------------------------------------- 42static const char* const kClassPathName = "android/media/AudioTrack"; 43static const char* const kAudioAttributesClassPathName = "android/media/AudioAttributes"; 44 45struct audio_track_fields_t { 46 // these fields provide access from C++ to the... 47 jmethodID postNativeEventInJava; //... event post callback method 48 jfieldID nativeTrackInJavaObj; // stores in Java the native AudioTrack object 49 jfieldID jniData; // stores in Java additional resources used by the native AudioTrack 50 jfieldID fieldStreamType; // ... 
mStreamType field in the AudioTrack Java object 51}; 52struct audio_attributes_fields_t { 53 jfieldID fieldUsage; // AudioAttributes.mUsage 54 jfieldID fieldContentType; // AudioAttributes.mContentType 55 jfieldID fieldFlags; // AudioAttributes.mFlags 56 jfieldID fieldTags; // AudioAttributes.mTags 57}; 58static audio_track_fields_t javaAudioTrackFields; 59static audio_attributes_fields_t javaAudioAttrFields; 60 61struct audiotrack_callback_cookie { 62 jclass audioTrack_class; 63 jobject audioTrack_ref; 64 bool busy; 65 Condition cond; 66}; 67 68// keep these values in sync with AudioTrack.java 69#define MODE_STATIC 0 70#define MODE_STREAM 1 71 72// ---------------------------------------------------------------------------- 73class AudioTrackJniStorage { 74 public: 75 sp<MemoryHeapBase> mMemHeap; 76 sp<MemoryBase> mMemBase; 77 audiotrack_callback_cookie mCallbackData; 78 79 AudioTrackJniStorage() { 80 mCallbackData.audioTrack_class = 0; 81 mCallbackData.audioTrack_ref = 0; 82 } 83 84 ~AudioTrackJniStorage() { 85 mMemBase.clear(); 86 mMemHeap.clear(); 87 } 88 89 bool allocSharedMem(int sizeInBytes) { 90 mMemHeap = new MemoryHeapBase(sizeInBytes, 0, "AudioTrack Heap Base"); 91 if (mMemHeap->getHeapID() < 0) { 92 return false; 93 } 94 mMemBase = new MemoryBase(mMemHeap, 0, sizeInBytes); 95 return true; 96 } 97}; 98 99static Mutex sLock; 100static SortedVector <audiotrack_callback_cookie *> sAudioTrackCallBackCookies; 101 102// ---------------------------------------------------------------------------- 103#define DEFAULT_OUTPUT_SAMPLE_RATE 44100 104 105#define AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM -16 106#define AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK -17 107#define AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT -18 108#define AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE -19 109#define AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED -20 110 111// ---------------------------------------------------------------------------- 112static void audioCallback(int event, void* user, void *info) { 
113 114 audiotrack_callback_cookie *callbackInfo = (audiotrack_callback_cookie *)user; 115 { 116 Mutex::Autolock l(sLock); 117 if (sAudioTrackCallBackCookies.indexOf(callbackInfo) < 0) { 118 return; 119 } 120 callbackInfo->busy = true; 121 } 122 123 switch (event) { 124 case AudioTrack::EVENT_MARKER: { 125 JNIEnv *env = AndroidRuntime::getJNIEnv(); 126 if (user != NULL && env != NULL) { 127 env->CallStaticVoidMethod( 128 callbackInfo->audioTrack_class, 129 javaAudioTrackFields.postNativeEventInJava, 130 callbackInfo->audioTrack_ref, event, 0,0, NULL); 131 if (env->ExceptionCheck()) { 132 env->ExceptionDescribe(); 133 env->ExceptionClear(); 134 } 135 } 136 } break; 137 138 case AudioTrack::EVENT_NEW_POS: { 139 JNIEnv *env = AndroidRuntime::getJNIEnv(); 140 if (user != NULL && env != NULL) { 141 env->CallStaticVoidMethod( 142 callbackInfo->audioTrack_class, 143 javaAudioTrackFields.postNativeEventInJava, 144 callbackInfo->audioTrack_ref, event, 0,0, NULL); 145 if (env->ExceptionCheck()) { 146 env->ExceptionDescribe(); 147 env->ExceptionClear(); 148 } 149 } 150 } break; 151 } 152 153 { 154 Mutex::Autolock l(sLock); 155 callbackInfo->busy = false; 156 callbackInfo->cond.broadcast(); 157 } 158} 159 160 161// ---------------------------------------------------------------------------- 162static sp<AudioTrack> getAudioTrack(JNIEnv* env, jobject thiz) 163{ 164 Mutex::Autolock l(sLock); 165 AudioTrack* const at = 166 (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj); 167 return sp<AudioTrack>(at); 168} 169 170static sp<AudioTrack> setAudioTrack(JNIEnv* env, jobject thiz, const sp<AudioTrack>& at) 171{ 172 Mutex::Autolock l(sLock); 173 sp<AudioTrack> old = 174 (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj); 175 if (at.get()) { 176 at->incStrong((void*)setAudioTrack); 177 } 178 if (old != 0) { 179 old->decStrong((void*)setAudioTrack); 180 } 181 env->SetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, 
(jlong)at.get()); 182 return old; 183} 184// ---------------------------------------------------------------------------- 185static jint 186android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this, 187 jobject jaa, 188 jint sampleRateInHertz, jint javaChannelMask, 189 jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession) { 190 191 ALOGV("sampleRate=%d, audioFormat(from Java)=%d, channel mask=%x, buffSize=%d", 192 sampleRateInHertz, audioFormat, javaChannelMask, buffSizeInBytes); 193 194 if (jaa == 0) { 195 ALOGE("Error creating AudioTrack: invalid audio attributes"); 196 return (jint) AUDIO_JAVA_ERROR; 197 } 198 199 // Java channel masks don't map directly to the native definition, but it's a simple shift 200 // to skip the two deprecated channel configurations "default" and "mono". 201 audio_channel_mask_t nativeChannelMask = ((uint32_t)javaChannelMask) >> 2; 202 203 if (!audio_is_output_channel(nativeChannelMask)) { 204 ALOGE("Error creating AudioTrack: invalid channel mask %#x.", javaChannelMask); 205 return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK; 206 } 207 208 uint32_t channelCount = popcount(nativeChannelMask); 209 210 // check the format. 
211 // This function was called from Java, so we compare the format against the Java constants 212 audio_format_t format = audioFormatToNative(audioFormat); 213 if (format == AUDIO_FORMAT_INVALID) { 214 ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat); 215 return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT; 216 } 217 218 // for the moment 8bitPCM in MODE_STATIC is not supported natively in the AudioTrack C++ class 219 // so we declare everything as 16bitPCM, the 8->16bit conversion for MODE_STATIC will be handled 220 // in android_media_AudioTrack_native_write_byte() 221 if ((format == AUDIO_FORMAT_PCM_8_BIT) 222 && (memoryMode == MODE_STATIC)) { 223 ALOGV("android_media_AudioTrack_setup(): requesting MODE_STATIC for 8bit \ 224 buff size of %dbytes, switching to 16bit, buff size of %dbytes", 225 buffSizeInBytes, 2*buffSizeInBytes); 226 format = AUDIO_FORMAT_PCM_16_BIT; 227 // we will need twice the memory to store the data 228 buffSizeInBytes *= 2; 229 } 230 231 // compute the frame count 232 size_t frameCount; 233 if (audio_is_linear_pcm(format)) { 234 const size_t bytesPerSample = audio_bytes_per_sample(format); 235 frameCount = buffSizeInBytes / (channelCount * bytesPerSample); 236 } else { 237 frameCount = buffSizeInBytes; 238 } 239 240 jclass clazz = env->GetObjectClass(thiz); 241 if (clazz == NULL) { 242 ALOGE("Can't find %s when setting up callback.", kClassPathName); 243 return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED; 244 } 245 246 if (jSession == NULL) { 247 ALOGE("Error creating AudioTrack: invalid session ID pointer"); 248 return (jint) AUDIO_JAVA_ERROR; 249 } 250 251 jint* nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL); 252 if (nSession == NULL) { 253 ALOGE("Error creating AudioTrack: Error retrieving session id pointer"); 254 return (jint) AUDIO_JAVA_ERROR; 255 } 256 int sessionId = nSession[0]; 257 env->ReleasePrimitiveArrayCritical(jSession, nSession, 0); 258 nSession = NULL; 259 260 // create 
the native AudioTrack object 261 sp<AudioTrack> lpTrack = new AudioTrack(); 262 263 audio_attributes_t *paa = NULL; 264 // read the AudioAttributes values 265 paa = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t)); 266 const jstring jtags = (jstring) env->GetObjectField(jaa, javaAudioAttrFields.fieldTags); 267 const char* tags = env->GetStringUTFChars(jtags, NULL); 268 // copying array size -1, char array for tags was calloc'd, no need to NULL-terminate it 269 strncpy(paa->tags, tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1); 270 env->ReleaseStringUTFChars(jtags, tags); 271 paa->usage = (audio_usage_t) env->GetIntField(jaa, javaAudioAttrFields.fieldUsage); 272 paa->content_type = 273 (audio_content_type_t) env->GetIntField(jaa, javaAudioAttrFields.fieldContentType); 274 paa->flags = env->GetIntField(jaa, javaAudioAttrFields.fieldFlags); 275 276 ALOGV("AudioTrack_setup for usage=%d content=%d flags=0x%#x tags=%s", 277 paa->usage, paa->content_type, paa->flags, paa->tags); 278 279 // initialize the callback information: 280 // this data will be passed with every AudioTrack callback 281 AudioTrackJniStorage* lpJniStorage = new AudioTrackJniStorage(); 282 lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz); 283 // we use a weak reference so the AudioTrack object can be garbage collected. 
284 lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this); 285 lpJniStorage->mCallbackData.busy = false; 286 287 // initialize the native AudioTrack object 288 status_t status = NO_ERROR; 289 switch (memoryMode) { 290 case MODE_STREAM: 291 292 status = lpTrack->set( 293 AUDIO_STREAM_DEFAULT,// stream type 294 sampleRateInHertz, 295 format,// word length, PCM 296 nativeChannelMask, 297 frameCount, 298 AUDIO_OUTPUT_FLAG_NONE, 299 audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user) 300 0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack 301 0,// shared mem 302 true,// thread can call Java 303 sessionId,// audio session ID 304 AudioTrack::TRANSFER_DEFAULT, // default transfer mode 305 NULL, // default offloadInfo 306 -1, -1, // default uid, pid values 307 paa); 308 break; 309 310 case MODE_STATIC: 311 // AudioTrack is using shared memory 312 313 if (!lpJniStorage->allocSharedMem(buffSizeInBytes)) { 314 ALOGE("Error creating AudioTrack in static mode: error creating mem heap base"); 315 goto native_init_failure; 316 } 317 318 status = lpTrack->set( 319 AUDIO_STREAM_DEFAULT,// stream type 320 sampleRateInHertz, 321 format,// word length, PCM 322 nativeChannelMask, 323 frameCount, 324 AUDIO_OUTPUT_FLAG_NONE, 325 audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)); 326 0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack 327 lpJniStorage->mMemBase,// shared mem 328 true,// thread can call Java 329 sessionId,// audio session ID 330 AudioTrack::TRANSFER_DEFAULT, // default transfer mode 331 NULL, // default offloadInfo 332 -1, -1, // default uid, pid values 333 paa); 334 break; 335 336 default: 337 ALOGE("Unknown mode %d", memoryMode); 338 goto native_init_failure; 339 } 340 341 if (status != NO_ERROR) { 342 ALOGE("Error %d initializing AudioTrack", status); 343 goto native_init_failure; 344 } 345 346 nSession = (jint *) 
env->GetPrimitiveArrayCritical(jSession, NULL); 347 if (nSession == NULL) { 348 ALOGE("Error creating AudioTrack: Error retrieving session id pointer"); 349 goto native_init_failure; 350 } 351 // read the audio session ID back from AudioTrack in case we create a new session 352 nSession[0] = lpTrack->getSessionId(); 353 env->ReleasePrimitiveArrayCritical(jSession, nSession, 0); 354 nSession = NULL; 355 356 { // scope for the lock 357 Mutex::Autolock l(sLock); 358 sAudioTrackCallBackCookies.add(&lpJniStorage->mCallbackData); 359 } 360 // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field 361 // of the Java object (in mNativeTrackInJavaObj) 362 setAudioTrack(env, thiz, lpTrack); 363 364 // save the JNI resources so we can free them later 365 //ALOGV("storing lpJniStorage: %x\n", (long)lpJniStorage); 366 env->SetLongField(thiz, javaAudioTrackFields.jniData, (jlong)lpJniStorage); 367 368 // since we had audio attributes, the stream type was derived from them during the 369 // creation of the native AudioTrack: push the same value to the Java object 370 env->SetIntField(thiz, javaAudioTrackFields.fieldStreamType, (jint) lpTrack->streamType()); 371 // audio attributes were copied in AudioTrack creation 372 free(paa); 373 paa = NULL; 374 375 376 return (jint) AUDIO_JAVA_SUCCESS; 377 378 // failures: 379native_init_failure: 380 if (paa != NULL) { 381 free(paa); 382 } 383 if (nSession != NULL) { 384 env->ReleasePrimitiveArrayCritical(jSession, nSession, 0); 385 } 386 env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_class); 387 env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_ref); 388 delete lpJniStorage; 389 env->SetLongField(thiz, javaAudioTrackFields.jniData, 0); 390 391 return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED; 392} 393 394 395// ---------------------------------------------------------------------------- 396static void 397android_media_AudioTrack_start(JNIEnv *env, jobject thiz) 398{ 399 sp<AudioTrack> lpTrack = 
getAudioTrack(env, thiz); 400 if (lpTrack == NULL) { 401 jniThrowException(env, "java/lang/IllegalStateException", 402 "Unable to retrieve AudioTrack pointer for start()"); 403 return; 404 } 405 406 lpTrack->start(); 407} 408 409 410// ---------------------------------------------------------------------------- 411static void 412android_media_AudioTrack_stop(JNIEnv *env, jobject thiz) 413{ 414 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 415 if (lpTrack == NULL) { 416 jniThrowException(env, "java/lang/IllegalStateException", 417 "Unable to retrieve AudioTrack pointer for stop()"); 418 return; 419 } 420 421 lpTrack->stop(); 422} 423 424 425// ---------------------------------------------------------------------------- 426static void 427android_media_AudioTrack_pause(JNIEnv *env, jobject thiz) 428{ 429 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 430 if (lpTrack == NULL) { 431 jniThrowException(env, "java/lang/IllegalStateException", 432 "Unable to retrieve AudioTrack pointer for pause()"); 433 return; 434 } 435 436 lpTrack->pause(); 437} 438 439 440// ---------------------------------------------------------------------------- 441static void 442android_media_AudioTrack_flush(JNIEnv *env, jobject thiz) 443{ 444 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 445 if (lpTrack == NULL) { 446 jniThrowException(env, "java/lang/IllegalStateException", 447 "Unable to retrieve AudioTrack pointer for flush()"); 448 return; 449 } 450 451 lpTrack->flush(); 452} 453 454// ---------------------------------------------------------------------------- 455static void 456android_media_AudioTrack_set_volume(JNIEnv *env, jobject thiz, jfloat leftVol, jfloat rightVol ) 457{ 458 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 459 if (lpTrack == NULL) { 460 jniThrowException(env, "java/lang/IllegalStateException", 461 "Unable to retrieve AudioTrack pointer for setVolume()"); 462 return; 463 } 464 465 lpTrack->setVolume(leftVol, rightVol); 466} 467 468// 
---------------------------------------------------------------------------- 469 470#define CALLBACK_COND_WAIT_TIMEOUT_MS 1000 471static void android_media_AudioTrack_release(JNIEnv *env, jobject thiz) { 472 sp<AudioTrack> lpTrack = setAudioTrack(env, thiz, 0); 473 if (lpTrack == NULL) { 474 return; 475 } 476 //ALOGV("deleting lpTrack: %x\n", (int)lpTrack); 477 lpTrack->stop(); 478 479 // delete the JNI data 480 AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetLongField( 481 thiz, javaAudioTrackFields.jniData); 482 // reset the native resources in the Java object so any attempt to access 483 // them after a call to release fails. 484 env->SetLongField(thiz, javaAudioTrackFields.jniData, 0); 485 486 if (pJniStorage) { 487 Mutex::Autolock l(sLock); 488 audiotrack_callback_cookie *lpCookie = &pJniStorage->mCallbackData; 489 //ALOGV("deleting pJniStorage: %x\n", (int)pJniStorage); 490 while (lpCookie->busy) { 491 if (lpCookie->cond.waitRelative(sLock, 492 milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) != 493 NO_ERROR) { 494 break; 495 } 496 } 497 sAudioTrackCallBackCookies.remove(lpCookie); 498 // delete global refs created in native_setup 499 env->DeleteGlobalRef(lpCookie->audioTrack_class); 500 env->DeleteGlobalRef(lpCookie->audioTrack_ref); 501 delete pJniStorage; 502 } 503} 504 505 506// ---------------------------------------------------------------------------- 507static void android_media_AudioTrack_finalize(JNIEnv *env, jobject thiz) { 508 //ALOGV("android_media_AudioTrack_finalize jobject: %x\n", (int)thiz); 509 android_media_AudioTrack_release(env, thiz); 510} 511 512// ---------------------------------------------------------------------------- 513jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const jbyte* data, 514 jint offsetInBytes, jint sizeInBytes, bool blocking = true) { 515 // give the data to the native AudioTrack object (the data starts at the offset) 516 ssize_t written = 0; 517 // regular write() or copy the data 
to the AudioTrack's shared memory? 518 if (track->sharedBuffer() == 0) { 519 written = track->write(data + offsetInBytes, sizeInBytes, blocking); 520 // for compatibility with earlier behavior of write(), return 0 in this case 521 if (written == (ssize_t) WOULD_BLOCK) { 522 written = 0; 523 } 524 } else { 525 const audio_format_t format = audioFormatToNative(audioFormat); 526 switch (format) { 527 528 default: 529 case AUDIO_FORMAT_PCM_FLOAT: 530 case AUDIO_FORMAT_PCM_16_BIT: { 531 // writing to shared memory, check for capacity 532 if ((size_t)sizeInBytes > track->sharedBuffer()->size()) { 533 sizeInBytes = track->sharedBuffer()->size(); 534 } 535 memcpy(track->sharedBuffer()->pointer(), data + offsetInBytes, sizeInBytes); 536 written = sizeInBytes; 537 } break; 538 539 case AUDIO_FORMAT_PCM_8_BIT: { 540 // data contains 8bit data we need to expand to 16bit before copying 541 // to the shared memory 542 // writing to shared memory, check for capacity, 543 // note that input data will occupy 2X the input space due to 8 to 16bit conversion 544 if (((size_t)sizeInBytes)*2 > track->sharedBuffer()->size()) { 545 sizeInBytes = track->sharedBuffer()->size() / 2; 546 } 547 int count = sizeInBytes; 548 int16_t *dst = (int16_t *)track->sharedBuffer()->pointer(); 549 const uint8_t *src = (const uint8_t *)(data + offsetInBytes); 550 memcpy_to_i16_from_u8(dst, src, count); 551 // even though we wrote 2*sizeInBytes, we only report sizeInBytes as written to hide 552 // the 8bit mixer restriction from the user of this function 553 written = sizeInBytes; 554 } break; 555 556 } 557 } 558 return written; 559 560} 561 562// ---------------------------------------------------------------------------- 563static jint android_media_AudioTrack_write_byte(JNIEnv *env, jobject thiz, 564 jbyteArray javaAudioData, 565 jint offsetInBytes, jint sizeInBytes, 566 jint javaAudioFormat, 567 jboolean isWriteBlocking) { 568 //ALOGV("android_media_AudioTrack_write_byte(offset=%d, sizeInBytes=%d) 
called", 569 // offsetInBytes, sizeInBytes); 570 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 571 if (lpTrack == NULL) { 572 jniThrowException(env, "java/lang/IllegalStateException", 573 "Unable to retrieve AudioTrack pointer for write()"); 574 return 0; 575 } 576 577 // get the pointer for the audio data from the java array 578 // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such 579 // a way that it becomes much more efficient. When doing so, we will have to prevent the 580 // AudioSystem callback to be called while in critical section (in case of media server 581 // process crash for instance) 582 jbyte* cAudioData = NULL; 583 if (javaAudioData) { 584 cAudioData = (jbyte *)env->GetByteArrayElements(javaAudioData, NULL); 585 if (cAudioData == NULL) { 586 ALOGE("Error retrieving source of audio data to play, can't play"); 587 return 0; // out of memory or no data to load 588 } 589 } else { 590 ALOGE("NULL java array of audio data to play, can't play"); 591 return 0; 592 } 593 594 jint written = writeToTrack(lpTrack, javaAudioFormat, cAudioData, offsetInBytes, sizeInBytes, 595 isWriteBlocking == JNI_TRUE /* blocking */); 596 597 env->ReleaseByteArrayElements(javaAudioData, cAudioData, 0); 598 599 //ALOGV("write wrote %d (tried %d) bytes in the native AudioTrack with offset %d", 600 // (int)written, (int)(sizeInBytes), (int)offsetInBytes); 601 return written; 602} 603 604 605// ---------------------------------------------------------------------------- 606static jint android_media_AudioTrack_write_native_bytes(JNIEnv *env, jobject thiz, 607 jbyteArray javaBytes, jint byteOffset, jint sizeInBytes, 608 jint javaAudioFormat, jboolean isWriteBlocking) { 609 //ALOGV("android_media_AudioTrack_write_native_bytes(offset=%d, sizeInBytes=%d) called", 610 // offsetInBytes, sizeInBytes); 611 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 612 if (lpTrack == NULL) { 613 jniThrowException(env, "java/lang/IllegalStateException", 614 
"Unable to retrieve AudioTrack pointer for write()"); 615 return 0; 616 } 617 618 ScopedBytesRO bytes(env, javaBytes); 619 if (bytes.get() == NULL) { 620 ALOGE("Error retrieving source of audio data to play, can't play"); 621 return (jint)AUDIO_JAVA_BAD_VALUE; 622 } 623 624 jint written = writeToTrack(lpTrack, javaAudioFormat, bytes.get(), byteOffset, 625 sizeInBytes, isWriteBlocking == JNI_TRUE /* blocking */); 626 627 return written; 628} 629 630// ---------------------------------------------------------------------------- 631static jint android_media_AudioTrack_write_short(JNIEnv *env, jobject thiz, 632 jshortArray javaAudioData, 633 jint offsetInShorts, jint sizeInShorts, 634 jint javaAudioFormat) { 635 636 //ALOGV("android_media_AudioTrack_write_short(offset=%d, sizeInShorts=%d) called", 637 // offsetInShorts, sizeInShorts); 638 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 639 if (lpTrack == NULL) { 640 jniThrowException(env, "java/lang/IllegalStateException", 641 "Unable to retrieve AudioTrack pointer for write()"); 642 return 0; 643 } 644 645 // get the pointer for the audio data from the java array 646 // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such 647 // a way that it becomes much more efficient. 
When doing so, we will have to prevent the 648 // AudioSystem callback to be called while in critical section (in case of media server 649 // process crash for instance) 650 jshort* cAudioData = NULL; 651 if (javaAudioData) { 652 cAudioData = (jshort *)env->GetShortArrayElements(javaAudioData, NULL); 653 if (cAudioData == NULL) { 654 ALOGE("Error retrieving source of audio data to play, can't play"); 655 return 0; // out of memory or no data to load 656 } 657 } else { 658 ALOGE("NULL java array of audio data to play, can't play"); 659 return 0; 660 } 661 jint written = writeToTrack(lpTrack, javaAudioFormat, (jbyte *)cAudioData, 662 offsetInShorts * sizeof(short), sizeInShorts * sizeof(short), 663 true /*blocking write, legacy behavior*/); 664 env->ReleaseShortArrayElements(javaAudioData, cAudioData, 0); 665 666 if (written > 0) { 667 written /= sizeof(short); 668 } 669 //ALOGV("write wrote %d (tried %d) shorts in the native AudioTrack with offset %d", 670 // (int)written, (int)(sizeInShorts), (int)offsetInShorts); 671 672 return written; 673} 674 675 676// ---------------------------------------------------------------------------- 677static jint android_media_AudioTrack_write_float(JNIEnv *env, jobject thiz, 678 jfloatArray javaAudioData, 679 jint offsetInFloats, jint sizeInFloats, 680 jint javaAudioFormat, 681 jboolean isWriteBlocking) { 682 683 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 684 if (lpTrack == NULL) { 685 jniThrowException(env, "java/lang/IllegalStateException", 686 "Unable to retrieve AudioTrack pointer for write()"); 687 return 0; 688 } 689 690 jfloat* cAudioData = NULL; 691 if (javaAudioData) { 692 cAudioData = (jfloat *)env->GetFloatArrayElements(javaAudioData, NULL); 693 if (cAudioData == NULL) { 694 ALOGE("Error retrieving source of audio data to play, can't play"); 695 return 0; // out of memory or no data to load 696 } 697 } else { 698 ALOGE("NULL java array of audio data to play, can't play"); 699 return 0; 700 } 701 jint written = 
writeToTrack(lpTrack, javaAudioFormat, (jbyte *)cAudioData, 702 offsetInFloats * sizeof(float), sizeInFloats * sizeof(float), 703 isWriteBlocking == JNI_TRUE /* blocking */); 704 env->ReleaseFloatArrayElements(javaAudioData, cAudioData, 0); 705 706 if (written > 0) { 707 written /= sizeof(float); 708 } 709 710 return written; 711} 712 713 714// ---------------------------------------------------------------------------- 715static jint android_media_AudioTrack_get_native_frame_count(JNIEnv *env, jobject thiz) { 716 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 717 if (lpTrack == NULL) { 718 jniThrowException(env, "java/lang/IllegalStateException", 719 "Unable to retrieve AudioTrack pointer for frameCount()"); 720 return (jint)AUDIO_JAVA_ERROR; 721 } 722 723 return lpTrack->frameCount(); 724} 725 726 727// ---------------------------------------------------------------------------- 728static jint android_media_AudioTrack_set_playback_rate(JNIEnv *env, jobject thiz, 729 jint sampleRateInHz) { 730 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 731 if (lpTrack == NULL) { 732 jniThrowException(env, "java/lang/IllegalStateException", 733 "Unable to retrieve AudioTrack pointer for setSampleRate()"); 734 return (jint)AUDIO_JAVA_ERROR; 735 } 736 return nativeToJavaStatus(lpTrack->setSampleRate(sampleRateInHz)); 737} 738 739 740// ---------------------------------------------------------------------------- 741static jint android_media_AudioTrack_get_playback_rate(JNIEnv *env, jobject thiz) { 742 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 743 if (lpTrack == NULL) { 744 jniThrowException(env, "java/lang/IllegalStateException", 745 "Unable to retrieve AudioTrack pointer for getSampleRate()"); 746 return (jint)AUDIO_JAVA_ERROR; 747 } 748 return (jint) lpTrack->getSampleRate(); 749} 750 751 752// ---------------------------------------------------------------------------- 753static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env, jobject thiz, 754 jint 
markerPos) { 755 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 756 if (lpTrack == NULL) { 757 jniThrowException(env, "java/lang/IllegalStateException", 758 "Unable to retrieve AudioTrack pointer for setMarkerPosition()"); 759 return (jint)AUDIO_JAVA_ERROR; 760 } 761 return nativeToJavaStatus( lpTrack->setMarkerPosition(markerPos) ); 762} 763 764 765// ---------------------------------------------------------------------------- 766static jint android_media_AudioTrack_get_marker_pos(JNIEnv *env, jobject thiz) { 767 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 768 uint32_t markerPos = 0; 769 770 if (lpTrack == NULL) { 771 jniThrowException(env, "java/lang/IllegalStateException", 772 "Unable to retrieve AudioTrack pointer for getMarkerPosition()"); 773 return (jint)AUDIO_JAVA_ERROR; 774 } 775 lpTrack->getMarkerPosition(&markerPos); 776 return (jint)markerPos; 777} 778 779 780// ---------------------------------------------------------------------------- 781static jint android_media_AudioTrack_set_pos_update_period(JNIEnv *env, jobject thiz, 782 jint period) { 783 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 784 if (lpTrack == NULL) { 785 jniThrowException(env, "java/lang/IllegalStateException", 786 "Unable to retrieve AudioTrack pointer for setPositionUpdatePeriod()"); 787 return (jint)AUDIO_JAVA_ERROR; 788 } 789 return nativeToJavaStatus( lpTrack->setPositionUpdatePeriod(period) ); 790} 791 792 793// ---------------------------------------------------------------------------- 794static jint android_media_AudioTrack_get_pos_update_period(JNIEnv *env, jobject thiz) { 795 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 796 uint32_t period = 0; 797 798 if (lpTrack == NULL) { 799 jniThrowException(env, "java/lang/IllegalStateException", 800 "Unable to retrieve AudioTrack pointer for getPositionUpdatePeriod()"); 801 return (jint)AUDIO_JAVA_ERROR; 802 } 803 lpTrack->getPositionUpdatePeriod(&period); 804 return (jint)period; 805} 806 807 808// 
---------------------------------------------------------------------------- 809static jint android_media_AudioTrack_set_position(JNIEnv *env, jobject thiz, 810 jint position) { 811 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 812 if (lpTrack == NULL) { 813 jniThrowException(env, "java/lang/IllegalStateException", 814 "Unable to retrieve AudioTrack pointer for setPosition()"); 815 return (jint)AUDIO_JAVA_ERROR; 816 } 817 return nativeToJavaStatus( lpTrack->setPosition(position) ); 818} 819 820 821// ---------------------------------------------------------------------------- 822static jint android_media_AudioTrack_get_position(JNIEnv *env, jobject thiz) { 823 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 824 uint32_t position = 0; 825 826 if (lpTrack == NULL) { 827 jniThrowException(env, "java/lang/IllegalStateException", 828 "Unable to retrieve AudioTrack pointer for getPosition()"); 829 return (jint)AUDIO_JAVA_ERROR; 830 } 831 lpTrack->getPosition(&position); 832 return (jint)position; 833} 834 835 836// ---------------------------------------------------------------------------- 837static jint android_media_AudioTrack_get_latency(JNIEnv *env, jobject thiz) { 838 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 839 840 if (lpTrack == NULL) { 841 jniThrowException(env, "java/lang/IllegalStateException", 842 "Unable to retrieve AudioTrack pointer for latency()"); 843 return (jint)AUDIO_JAVA_ERROR; 844 } 845 return (jint)lpTrack->latency(); 846} 847 848 849// ---------------------------------------------------------------------------- 850static jint android_media_AudioTrack_get_timestamp(JNIEnv *env, jobject thiz, jlongArray jTimestamp) { 851 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 852 853 if (lpTrack == NULL) { 854 ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()"); 855 return (jint)AUDIO_JAVA_ERROR; 856 } 857 AudioTimestamp timestamp; 858 status_t status = lpTrack->getTimestamp(timestamp); 859 if (status == OK) { 860 jlong* 
nTimestamp = (jlong *) env->GetPrimitiveArrayCritical(jTimestamp, NULL); 861 if (nTimestamp == NULL) { 862 ALOGE("Unable to get array for getTimestamp()"); 863 return (jint)AUDIO_JAVA_ERROR; 864 } 865 nTimestamp[0] = (jlong) timestamp.mPosition; 866 nTimestamp[1] = (jlong) ((timestamp.mTime.tv_sec * 1000000000LL) + timestamp.mTime.tv_nsec); 867 env->ReleasePrimitiveArrayCritical(jTimestamp, nTimestamp, 0); 868 } 869 return (jint) nativeToJavaStatus(status); 870} 871 872 873// ---------------------------------------------------------------------------- 874static jint android_media_AudioTrack_set_loop(JNIEnv *env, jobject thiz, 875 jint loopStart, jint loopEnd, jint loopCount) { 876 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 877 if (lpTrack == NULL) { 878 jniThrowException(env, "java/lang/IllegalStateException", 879 "Unable to retrieve AudioTrack pointer for setLoop()"); 880 return (jint)AUDIO_JAVA_ERROR; 881 } 882 return nativeToJavaStatus( lpTrack->setLoop(loopStart, loopEnd, loopCount) ); 883} 884 885 886// ---------------------------------------------------------------------------- 887static jint android_media_AudioTrack_reload(JNIEnv *env, jobject thiz) { 888 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 889 if (lpTrack == NULL) { 890 jniThrowException(env, "java/lang/IllegalStateException", 891 "Unable to retrieve AudioTrack pointer for reload()"); 892 return (jint)AUDIO_JAVA_ERROR; 893 } 894 return nativeToJavaStatus( lpTrack->reload() ); 895} 896 897 898// ---------------------------------------------------------------------------- 899static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobject thiz, 900 jint javaStreamType) { 901 uint32_t afSamplingRate; 902 // convert the stream type from Java to native value 903 // FIXME: code duplication with android_media_AudioTrack_setup() 904 audio_stream_type_t nativeStreamType; 905 switch (javaStreamType) { 906 case AUDIO_STREAM_VOICE_CALL: 907 case AUDIO_STREAM_SYSTEM: 908 case 
AUDIO_STREAM_RING: 909 case AUDIO_STREAM_MUSIC: 910 case AUDIO_STREAM_ALARM: 911 case AUDIO_STREAM_NOTIFICATION: 912 case AUDIO_STREAM_BLUETOOTH_SCO: 913 case AUDIO_STREAM_DTMF: 914 nativeStreamType = (audio_stream_type_t) javaStreamType; 915 break; 916 default: 917 nativeStreamType = AUDIO_STREAM_DEFAULT; 918 break; 919 } 920 921 status_t status = AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType); 922 if (status != NO_ERROR) { 923 ALOGE("Error %d in AudioSystem::getOutputSamplingRate() for stream type %d " 924 "in AudioTrack JNI", status, nativeStreamType); 925 return DEFAULT_OUTPUT_SAMPLE_RATE; 926 } else { 927 return afSamplingRate; 928 } 929} 930 931 932// ---------------------------------------------------------------------------- 933// returns the minimum required size for the successful creation of a streaming AudioTrack 934// returns -1 if there was an error querying the hardware. 935static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz, 936 jint sampleRateInHertz, jint channelCount, jint audioFormat) { 937 938 size_t frameCount; 939 const status_t status = AudioTrack::getMinFrameCount(&frameCount, AUDIO_STREAM_DEFAULT, 940 sampleRateInHertz); 941 if (status != NO_ERROR) { 942 ALOGE("AudioTrack::getMinFrameCount() for sample rate %d failed with status %d", 943 sampleRateInHertz, status); 944 return -1; 945 } 946 const audio_format_t format = audioFormatToNative(audioFormat); 947 if (audio_is_linear_pcm(format)) { 948 const size_t bytesPerSample = audio_bytes_per_sample(format); 949 return frameCount * channelCount * bytesPerSample; 950 } else { 951 return frameCount; 952 } 953} 954 955// ---------------------------------------------------------------------------- 956static jint 957android_media_AudioTrack_setAuxEffectSendLevel(JNIEnv *env, jobject thiz, jfloat level ) 958{ 959 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 960 if (lpTrack == NULL ) { 961 jniThrowException(env, 
"java/lang/IllegalStateException", 962 "Unable to retrieve AudioTrack pointer for setAuxEffectSendLevel()"); 963 return -1; 964 } 965 966 status_t status = lpTrack->setAuxEffectSendLevel(level); 967 if (status != NO_ERROR) { 968 ALOGE("AudioTrack::setAuxEffectSendLevel() for level %g failed with status %d", 969 level, status); 970 } 971 return (jint) status; 972} 973 974// ---------------------------------------------------------------------------- 975static jint android_media_AudioTrack_attachAuxEffect(JNIEnv *env, jobject thiz, 976 jint effectId) { 977 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz); 978 if (lpTrack == NULL) { 979 jniThrowException(env, "java/lang/IllegalStateException", 980 "Unable to retrieve AudioTrack pointer for attachAuxEffect()"); 981 return (jint)AUDIO_JAVA_ERROR; 982 } 983 return nativeToJavaStatus( lpTrack->attachAuxEffect(effectId) ); 984} 985 986// ---------------------------------------------------------------------------- 987// ---------------------------------------------------------------------------- 988static JNINativeMethod gMethods[] = { 989 // name, signature, funcPtr 990 {"native_start", "()V", (void *)android_media_AudioTrack_start}, 991 {"native_stop", "()V", (void *)android_media_AudioTrack_stop}, 992 {"native_pause", "()V", (void *)android_media_AudioTrack_pause}, 993 {"native_flush", "()V", (void *)android_media_AudioTrack_flush}, 994 {"native_setup", "(Ljava/lang/Object;Ljava/lang/Object;IIIII[I)I", 995 (void *)android_media_AudioTrack_setup}, 996 {"native_finalize", "()V", (void *)android_media_AudioTrack_finalize}, 997 {"native_release", "()V", (void *)android_media_AudioTrack_release}, 998 {"native_write_byte", "([BIIIZ)I",(void *)android_media_AudioTrack_write_byte}, 999 {"native_write_native_bytes", 1000 "(Ljava/lang/Object;IIIZ)I", 1001 (void *)android_media_AudioTrack_write_native_bytes}, 1002 {"native_write_short", "([SIII)I", (void *)android_media_AudioTrack_write_short}, 1003 {"native_write_float", 
"([FIIIZ)I",(void *)android_media_AudioTrack_write_float}, 1004 {"native_setVolume", "(FF)V", (void *)android_media_AudioTrack_set_volume}, 1005 {"native_get_native_frame_count", 1006 "()I", (void *)android_media_AudioTrack_get_native_frame_count}, 1007 {"native_set_playback_rate", 1008 "(I)I", (void *)android_media_AudioTrack_set_playback_rate}, 1009 {"native_get_playback_rate", 1010 "()I", (void *)android_media_AudioTrack_get_playback_rate}, 1011 {"native_set_marker_pos","(I)I", (void *)android_media_AudioTrack_set_marker_pos}, 1012 {"native_get_marker_pos","()I", (void *)android_media_AudioTrack_get_marker_pos}, 1013 {"native_set_pos_update_period", 1014 "(I)I", (void *)android_media_AudioTrack_set_pos_update_period}, 1015 {"native_get_pos_update_period", 1016 "()I", (void *)android_media_AudioTrack_get_pos_update_period}, 1017 {"native_set_position", "(I)I", (void *)android_media_AudioTrack_set_position}, 1018 {"native_get_position", "()I", (void *)android_media_AudioTrack_get_position}, 1019 {"native_get_latency", "()I", (void *)android_media_AudioTrack_get_latency}, 1020 {"native_get_timestamp", "([J)I", (void *)android_media_AudioTrack_get_timestamp}, 1021 {"native_set_loop", "(III)I", (void *)android_media_AudioTrack_set_loop}, 1022 {"native_reload_static", "()I", (void *)android_media_AudioTrack_reload}, 1023 {"native_get_output_sample_rate", 1024 "(I)I", (void *)android_media_AudioTrack_get_output_sample_rate}, 1025 {"native_get_min_buff_size", 1026 "(III)I", (void *)android_media_AudioTrack_get_min_buff_size}, 1027 {"native_setAuxEffectSendLevel", 1028 "(F)I", (void *)android_media_AudioTrack_setAuxEffectSendLevel}, 1029 {"native_attachAuxEffect", 1030 "(I)I", (void *)android_media_AudioTrack_attachAuxEffect}, 1031}; 1032 1033 1034// field names found in android/media/AudioTrack.java 1035#define JAVA_POSTEVENT_CALLBACK_NAME "postEventFromNative" 1036#define JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME "mNativeTrackInJavaObj" 1037#define JAVA_JNIDATA_FIELD_NAME 
"mJniData" 1038#define JAVA_STREAMTYPE_FIELD_NAME "mStreamType" 1039 1040// ---------------------------------------------------------------------------- 1041// preconditions: 1042// theClass is valid 1043bool android_media_getIntConstantFromClass(JNIEnv* pEnv, jclass theClass, const char* className, 1044 const char* constName, int* constVal) { 1045 jfieldID javaConst = NULL; 1046 javaConst = pEnv->GetStaticFieldID(theClass, constName, "I"); 1047 if (javaConst != NULL) { 1048 *constVal = pEnv->GetStaticIntField(theClass, javaConst); 1049 return true; 1050 } else { 1051 ALOGE("Can't find %s.%s", className, constName); 1052 return false; 1053 } 1054} 1055 1056 1057// ---------------------------------------------------------------------------- 1058int register_android_media_AudioTrack(JNIEnv *env) 1059{ 1060 javaAudioTrackFields.nativeTrackInJavaObj = NULL; 1061 javaAudioTrackFields.postNativeEventInJava = NULL; 1062 1063 // Get the AudioTrack class 1064 jclass audioTrackClass = env->FindClass(kClassPathName); 1065 if (audioTrackClass == NULL) { 1066 ALOGE("Can't find %s", kClassPathName); 1067 return -1; 1068 } 1069 1070 // Get the postEvent method 1071 javaAudioTrackFields.postNativeEventInJava = env->GetStaticMethodID( 1072 audioTrackClass, 1073 JAVA_POSTEVENT_CALLBACK_NAME, "(Ljava/lang/Object;IIILjava/lang/Object;)V"); 1074 if (javaAudioTrackFields.postNativeEventInJava == NULL) { 1075 ALOGE("Can't find AudioTrack.%s", JAVA_POSTEVENT_CALLBACK_NAME); 1076 return -1; 1077 } 1078 1079 // Get the variables fields 1080 // nativeTrackInJavaObj 1081 javaAudioTrackFields.nativeTrackInJavaObj = env->GetFieldID( 1082 audioTrackClass, 1083 JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME, "J"); 1084 if (javaAudioTrackFields.nativeTrackInJavaObj == NULL) { 1085 ALOGE("Can't find AudioTrack.%s", JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME); 1086 return -1; 1087 } 1088 // jniData 1089 javaAudioTrackFields.jniData = env->GetFieldID( 1090 audioTrackClass, 1091 JAVA_JNIDATA_FIELD_NAME, "J"); 1092 if 
(javaAudioTrackFields.jniData == NULL) { 1093 ALOGE("Can't find AudioTrack.%s", JAVA_JNIDATA_FIELD_NAME); 1094 return -1; 1095 } 1096 // fieldStreamType 1097 javaAudioTrackFields.fieldStreamType = env->GetFieldID(audioTrackClass, 1098 JAVA_STREAMTYPE_FIELD_NAME, "I"); 1099 if (javaAudioTrackFields.fieldStreamType == NULL) { 1100 ALOGE("Can't find AudioTrack.%s", JAVA_STREAMTYPE_FIELD_NAME); 1101 return -1; 1102 } 1103 1104 // Get the AudioAttributes class and fields 1105 jclass audioAttrClass = env->FindClass(kAudioAttributesClassPathName); 1106 if (audioAttrClass == NULL) { 1107 ALOGE("Can't find %s", kAudioAttributesClassPathName); 1108 return -1; 1109 } 1110 jclass audioAttributesClassRef = (jclass)env->NewGlobalRef(audioAttrClass); 1111 javaAudioAttrFields.fieldUsage = env->GetFieldID(audioAttributesClassRef, "mUsage", "I"); 1112 javaAudioAttrFields.fieldContentType 1113 = env->GetFieldID(audioAttributesClassRef, "mContentType", "I"); 1114 javaAudioAttrFields.fieldFlags = env->GetFieldID(audioAttributesClassRef, "mFlags", "I"); 1115 javaAudioAttrFields.fieldTags = env->GetFieldID(audioAttributesClassRef, "mFormattedTags", 1116 "Ljava/lang/String;"); 1117 env->DeleteGlobalRef(audioAttributesClassRef); 1118 if (javaAudioAttrFields.fieldUsage == NULL || javaAudioAttrFields.fieldContentType == NULL 1119 || javaAudioAttrFields.fieldFlags == NULL || javaAudioAttrFields.fieldTags == NULL) { 1120 ALOGE("Can't initialize AudioAttributes fields"); 1121 return -1; 1122 } 1123 1124 return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods)); 1125} 1126 1127 1128// ---------------------------------------------------------------------------- 1129