android_media_AudioTrack.cpp revision 2defed988f74c98d5af8d02551ebf7262490cc5b
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0

#define LOG_TAG "AudioTrack-JNI"

#include <JNIHelp.h>
#include <JniConstants.h>
#include <android_runtime/AndroidRuntime.h>

#include "ScopedBytes.h"

#include <utils/Log.h>
#include <media/AudioSystem.h>
#include <media/AudioTrack.h>
#include <audio_utils/primitives.h>

#include <binder/MemoryHeapBase.h>
#include <binder/MemoryBase.h>

#include "android_media_AudioFormat.h"

// ----------------------------------------------------------------------------

using namespace android;

// ----------------------------------------------------------------------------
static const char* const kClassPathName = "android/media/AudioTrack";

struct fields_t {
    // these fields provide access from C++ to members of the Java AudioTrack class
    jmethodID postNativeEventInJava; // the event post callback method
    jfieldID  nativeTrackInJavaObj;  // Java field that holds the pointer to the native AudioTrack
    jfieldID  jniData;      // Java field that holds additional resources used by the native AudioTrack
};
static fields_t javaAudioTrackFields;

struct audiotrack_callback_cookie {
    jclass      audioTrack_class;
    jobject     audioTrack_ref;
    bool        busy;
    Condition   cond;
};

// keep these values in sync with AudioTrack.java
#define MODE_STATIC 0
#define MODE_STREAM 1

// ----------------------------------------------------------------------------
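// Per-track JNI resources: the shared memory used for MODE_STATIC playback, the callback
// cookie handed to the native AudioTrack, and the stream type the track was created with.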
class AudioTrackJniStorage {
    public:
        sp<MemoryHeapBase>         mMemHeap;
        sp<MemoryBase>             mMemBase;
        audiotrack_callback_cookie mCallbackData;
        audio_stream_type_t        mStreamType;

    AudioTrackJniStorage() {
        mCallbackData.audioTrack_class = 0;
        mCallbackData.audioTrack_ref = 0;
        mStreamType = AUDIO_STREAM_DEFAULT;
    }

    ~AudioTrackJniStorage() {
        mMemBase.clear();
        mMemHeap.clear();
    }

    bool allocSharedMem(int sizeInBytes) {
        mMemHeap = new MemoryHeapBase(sizeInBytes, 0, "AudioTrack Heap Base");
        if (mMemHeap->getHeapID() < 0) {
            return false;
        }
        mMemBase = new MemoryBase(mMemHeap, 0, sizeInBytes);
        return true;
    }
};

static Mutex sLock;
static SortedVector <audiotrack_callback_cookie *> sAudioTrackCallBackCookies;

// ----------------------------------------------------------------------------
#define DEFAULT_OUTPUT_SAMPLE_RATE   44100

#define AUDIOTRACK_SUCCESS                         0
#define AUDIOTRACK_ERROR                           -1
#define AUDIOTRACK_ERROR_BAD_VALUE                 -2
#define AUDIOTRACK_ERROR_INVALID_OPERATION         -3
#define AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM         -16
#define AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK  -17
#define AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT       -18
#define AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE   -19
#define AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED    -20

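// Convert a native status_t into one of the AUDIOTRACK_* error codes defined above.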
jint android_media_translateErrorCode(int code) {
    switch (code) {
    case NO_ERROR:
        return AUDIOTRACK_SUCCESS;
    case BAD_VALUE:
        return AUDIOTRACK_ERROR_BAD_VALUE;
    case INVALID_OPERATION:
        return AUDIOTRACK_ERROR_INVALID_OPERATION;
    default:
        return AUDIOTRACK_ERROR;
    }
}


// ----------------------------------------------------------------------------
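// Callback invoked by the native AudioTrack for EVENT_MARKER and EVENT_NEW_POS: forwards the
// event to the Java layer through postEventFromNative(), guarding against a track that is being
// released concurrently (see sAudioTrackCallBackCookies and the busy flag on the cookie).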
static void audioCallback(int event, void* user, void *info) {

    audiotrack_callback_cookie *callbackInfo = (audiotrack_callback_cookie *)user;
    {
        Mutex::Autolock l(sLock);
        if (sAudioTrackCallBackCookies.indexOf(callbackInfo) < 0) {
            return;
        }
        callbackInfo->busy = true;
    }

    switch (event) {
    case AudioTrack::EVENT_MARKER: {
        JNIEnv *env = AndroidRuntime::getJNIEnv();
        if (user != NULL && env != NULL) {
            env->CallStaticVoidMethod(
                callbackInfo->audioTrack_class,
                javaAudioTrackFields.postNativeEventInJava,
                callbackInfo->audioTrack_ref, event, 0,0, NULL);
            if (env->ExceptionCheck()) {
                env->ExceptionDescribe();
                env->ExceptionClear();
            }
        }
        } break;

    case AudioTrack::EVENT_NEW_POS: {
        JNIEnv *env = AndroidRuntime::getJNIEnv();
        if (user != NULL && env != NULL) {
            env->CallStaticVoidMethod(
                callbackInfo->audioTrack_class,
                javaAudioTrackFields.postNativeEventInJava,
                callbackInfo->audioTrack_ref, event, 0,0, NULL);
            if (env->ExceptionCheck()) {
                env->ExceptionDescribe();
                env->ExceptionClear();
            }
        }
        } break;
    }

    {
        Mutex::Autolock l(sLock);
        callbackInfo->busy = false;
        callbackInfo->cond.broadcast();
    }
}


// ----------------------------------------------------------------------------
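// Helpers to read and swap the strong reference to the native AudioTrack that is stored in the
// Java object's mNativeTrackInJavaObj field, holding sLock for the duration of the access.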
static sp<AudioTrack> getAudioTrack(JNIEnv* env, jobject thiz)
{
    Mutex::Autolock l(sLock);
    AudioTrack* const at =
            (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
    return sp<AudioTrack>(at);
}

static sp<AudioTrack> setAudioTrack(JNIEnv* env, jobject thiz, const sp<AudioTrack>& at)
{
    Mutex::Autolock l(sLock);
    sp<AudioTrack> old =
            (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
    if (at.get()) {
        at->incStrong((void*)setAudioTrack);
    }
    if (old != 0) {
        old->decStrong((void*)setAudioTrack);
    }
    env->SetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, (jlong)at.get());
    return old;
}

// ----------------------------------------------------------------------------
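// Implementation of AudioTrack.native_setup(): validates the parameters coming from Java,
// creates and initializes the native AudioTrack in streaming or static mode, and stores the
// resulting native objects in the Java instance fields.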
static jint
android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
        jint streamType, jint sampleRateInHertz, jint javaChannelMask,
        jint audioFormat, jint buffSizeInBytes, jint memoryMode, jintArray jSession)
{
    ALOGV("sampleRate=%d, audioFormat(from Java)=%d, channel mask=%x, buffSize=%d",
        sampleRateInHertz, audioFormat, javaChannelMask, buffSizeInBytes);

    // Java channel masks don't map directly to the native definition, but it's a simple shift
    // to skip the two deprecated channel configurations "default" and "mono".
    audio_channel_mask_t nativeChannelMask = ((uint32_t)javaChannelMask) >> 2;

    if (!audio_is_output_channel(nativeChannelMask)) {
        ALOGE("Error creating AudioTrack: invalid channel mask %#x.", javaChannelMask);
        return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK;
    }

    uint32_t channelCount = popcount(nativeChannelMask);

    // stream type already checked in Java
    audio_stream_type_t atStreamType = (audio_stream_type_t) streamType;

    // check the format.
    // This function was called from Java, so we compare the format against the Java constants
    audio_format_t format = audioFormatToNative(audioFormat);
    if (format == AUDIO_FORMAT_INVALID) {
        ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat);
        return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT;
    }

    // for the moment 8bitPCM in MODE_STATIC is not supported natively in the AudioTrack C++ class
    // so we declare everything as 16bitPCM, the 8->16bit conversion for MODE_STATIC will be handled
    // in android_media_AudioTrack_native_write_byte()
    if ((format == AUDIO_FORMAT_PCM_8_BIT)
        && (memoryMode == MODE_STATIC)) {
        ALOGV("android_media_AudioTrack_setup(): requesting MODE_STATIC for 8bit "
            "buff size of %dbytes, switching to 16bit, buff size of %dbytes",
            buffSizeInBytes, 2*buffSizeInBytes);
        format = AUDIO_FORMAT_PCM_16_BIT;
        // we will need twice the memory to store the data
        buffSizeInBytes *= 2;
    }

    // compute the frame count
    const size_t bytesPerSample = audio_bytes_per_sample(format);
    size_t frameCount = buffSizeInBytes / (channelCount * bytesPerSample);

    jclass clazz = env->GetObjectClass(thiz);
    if (clazz == NULL) {
        ALOGE("Can't find %s when setting up callback.", kClassPathName);
        return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
    }

    if (jSession == NULL) {
        ALOGE("Error creating AudioTrack: invalid session ID pointer");
        return (jint) AUDIOTRACK_ERROR;
    }

    jint* nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        return (jint) AUDIOTRACK_ERROR;
    }
    int sessionId = nSession[0];
    env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    nSession = NULL;

    // create the native AudioTrack object
    sp<AudioTrack> lpTrack = new AudioTrack();

    // initialize the callback information:
    // this data will be passed with every AudioTrack callback
    AudioTrackJniStorage* lpJniStorage = new AudioTrackJniStorage();
    lpJniStorage->mStreamType = atStreamType;
    lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
    // we use a weak reference so the AudioTrack object can be garbage collected.
    lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
    lpJniStorage->mCallbackData.busy = false;

    // initialize the native AudioTrack object
    status_t status = NO_ERROR;
    switch (memoryMode) {
    case MODE_STREAM:

        status = lpTrack->set(
            atStreamType,// stream type
            sampleRateInHertz,
            format,// word length, PCM
            nativeChannelMask,
            frameCount,
            AUDIO_OUTPUT_FLAG_NONE,
            audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
            0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
            0,// shared mem
            true,// thread can call Java
            sessionId);// audio session ID
        break;

    case MODE_STATIC:
        // AudioTrack is using shared memory

        if (!lpJniStorage->allocSharedMem(buffSizeInBytes)) {
            ALOGE("Error creating AudioTrack in static mode: error creating mem heap base");
            goto native_init_failure;
        }

        status = lpTrack->set(
            atStreamType,// stream type
            sampleRateInHertz,
            format,// word length, PCM
            nativeChannelMask,
            frameCount,
            AUDIO_OUTPUT_FLAG_NONE,
            audioCallback, &(lpJniStorage->mCallbackData),//callback, callback data (user)
            0,// notificationFrames == 0 since not using EVENT_MORE_DATA to feed the AudioTrack
            lpJniStorage->mMemBase,// shared mem
            true,// thread can call Java
            sessionId);// audio session ID
        break;

    default:
        ALOGE("Unknown mode %d", memoryMode);
        goto native_init_failure;
    }

    if (status != NO_ERROR) {
        ALOGE("Error %d initializing AudioTrack", status);
        goto native_init_failure;
    }

    nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
    if (nSession == NULL) {
        ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
        goto native_init_failure;
    }
    // read the audio session ID back from AudioTrack in case we create a new session
    nSession[0] = lpTrack->getSessionId();
    env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    nSession = NULL;

    {   // scope for the lock
        Mutex::Autolock l(sLock);
        sAudioTrackCallBackCookies.add(&lpJniStorage->mCallbackData);
    }
    // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field
    // of the Java object (in mNativeTrackInJavaObj)
    setAudioTrack(env, thiz, lpTrack);

    // save the JNI resources so we can free them later
    //ALOGV("storing lpJniStorage: %x\n", (long)lpJniStorage);
    env->SetLongField(thiz, javaAudioTrackFields.jniData, (jlong)lpJniStorage);

    return (jint) AUDIOTRACK_SUCCESS;

    // failures:
native_init_failure:
    if (nSession != NULL) {
        env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
    }
    env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_class);
    env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_ref);
    delete lpJniStorage;
    env->SetLongField(thiz, javaAudioTrackFields.jniData, 0);

    return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_start(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for start()");
        return;
    }

    lpTrack->start();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_stop(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for stop()");
        return;
    }

    lpTrack->stop();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_pause(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for pause()");
        return;
    }

    lpTrack->pause();
}


// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_flush(JNIEnv *env, jobject thiz)
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for flush()");
        return;
    }

    lpTrack->flush();
}

// ----------------------------------------------------------------------------
static void
android_media_AudioTrack_set_volume(JNIEnv *env, jobject thiz, jfloat leftVol, jfloat rightVol )
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setVolume()");
        return;
    }

    lpTrack->setVolume(leftVol, rightVol);
}

// ----------------------------------------------------------------------------

#define CALLBACK_COND_WAIT_TIMEOUT_MS 1000
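// Implementation of native_release(): stops the track, waits for any callback still using the
// cookie to complete, then frees the JNI storage and the global references created in setup.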
static void android_media_AudioTrack_release(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = setAudioTrack(env, thiz, 0);
    if (lpTrack == NULL) {
        return;
    }
    //ALOGV("deleting lpTrack: %x\n", (int)lpTrack);
    lpTrack->stop();

    // delete the JNI data
    AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetLongField(
        thiz, javaAudioTrackFields.jniData);
    // reset the native resources in the Java object so any attempt to access
    // them after a call to release fails.
    env->SetLongField(thiz, javaAudioTrackFields.jniData, 0);

    if (pJniStorage) {
        Mutex::Autolock l(sLock);
        audiotrack_callback_cookie *lpCookie = &pJniStorage->mCallbackData;
        //ALOGV("deleting pJniStorage: %x\n", (int)pJniStorage);
        while (lpCookie->busy) {
            if (lpCookie->cond.waitRelative(sLock,
                                            milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) !=
                                                    NO_ERROR) {
                break;
            }
        }
        sAudioTrackCallBackCookies.remove(lpCookie);
        // delete global refs created in native_setup
        env->DeleteGlobalRef(lpCookie->audioTrack_class);
        env->DeleteGlobalRef(lpCookie->audioTrack_ref);
        delete pJniStorage;
    }
}


// ----------------------------------------------------------------------------
static void android_media_AudioTrack_finalize(JNIEnv *env,  jobject thiz) {
    //ALOGV("android_media_AudioTrack_finalize jobject: %x\n", (int)thiz);
    android_media_AudioTrack_release(env, thiz);
}

// ----------------------------------------------------------------------------
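// Copy audio data to the track. For a streaming track this is a regular write(); for a static
// track the data is copied (with 8-bit data expanded to 16-bit) directly into the shared buffer.
// Returns the number of bytes consumed from the caller's buffer.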
jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const jbyte* data,
                  jint offsetInBytes, jint sizeInBytes, bool blocking = true) {
    // give the data to the native AudioTrack object (the data starts at the offset)
    ssize_t written = 0;
    // regular write() or copy the data to the AudioTrack's shared memory?
    if (track->sharedBuffer() == 0) {
        written = track->write(data + offsetInBytes, sizeInBytes, blocking);
        // for compatibility with earlier behavior of write(), return 0 in this case
        if (written == (ssize_t) WOULD_BLOCK) {
            written = 0;
        }
    } else {
        const audio_format_t format = audioFormatToNative(audioFormat);
        switch (format) {

        default:
            // TODO Currently the only possible values for format are AUDIO_FORMAT_PCM_16_BIT,
            // AUDIO_FORMAT_PCM_8_BIT, and AUDIO_FORMAT_PCM_FLOAT,
            // due to the limited set of values for audioFormat.
            // The next section of the switch will probably work for more formats, but it has only
            // been tested for AUDIO_FORMAT_PCM_16_BIT and AUDIO_FORMAT_PCM_FLOAT,
            // so that's why the "default" case fails.
            break;

        case AUDIO_FORMAT_PCM_FLOAT:
        case AUDIO_FORMAT_PCM_16_BIT: {
            // writing to shared memory, check for capacity
            if ((size_t)sizeInBytes > track->sharedBuffer()->size()) {
                sizeInBytes = track->sharedBuffer()->size();
            }
            memcpy(track->sharedBuffer()->pointer(), data + offsetInBytes, sizeInBytes);
            written = sizeInBytes;
            } break;

        case AUDIO_FORMAT_PCM_8_BIT: {
            // data contains 8bit data we need to expand to 16bit before copying
            // to the shared memory
            // writing to shared memory, check for capacity,
            // note that input data will occupy 2X the input space due to 8 to 16bit conversion
            if (((size_t)sizeInBytes)*2 > track->sharedBuffer()->size()) {
                sizeInBytes = track->sharedBuffer()->size() / 2;
            }
            int count = sizeInBytes;
            int16_t *dst = (int16_t *)track->sharedBuffer()->pointer();
            const uint8_t *src = (const uint8_t *)(data + offsetInBytes);
            memcpy_to_i16_from_u8(dst, src, count);
            // even though we wrote 2*sizeInBytes, we only report sizeInBytes as written to hide
            // the 8bit mixer restriction from the user of this function
            written = sizeInBytes;
            } break;

        }
    }
    return written;

}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_byte(JNIEnv *env,  jobject thiz,
                                                  jbyteArray javaAudioData,
                                                  jint offsetInBytes, jint sizeInBytes,
                                                  jint javaAudioFormat,
                                                  jboolean isWriteBlocking) {
    //ALOGV("android_media_AudioTrack_write_byte(offset=%d, sizeInBytes=%d) called",
    //    offsetInBytes, sizeInBytes);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    // get the pointer for the audio data from the java array
    // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such
    // a way that it becomes much more efficient. When doing so, we will have to prevent the
    // AudioSystem callback from being called while in a critical section (in case of a media
    // server process crash, for instance)
    jbyte* cAudioData = NULL;
    if (javaAudioData) {
        cAudioData = (jbyte *)env->GetByteArrayElements(javaAudioData, NULL);
        if (cAudioData == NULL) {
            ALOGE("Error retrieving source of audio data to play, can't play");
            return 0; // out of memory or no data to load
        }
    } else {
        ALOGE("NULL java array of audio data to play, can't play");
        return 0;
    }

    jint written = writeToTrack(lpTrack, javaAudioFormat, cAudioData, offsetInBytes, sizeInBytes,
            isWriteBlocking == JNI_TRUE /* blocking */);

    env->ReleaseByteArrayElements(javaAudioData, cAudioData, 0);

    //ALOGV("write wrote %d (tried %d) bytes in the native AudioTrack with offset %d",
    //     (int)written, (int)(sizeInBytes), (int)offsetInBytes);
    return written;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_native_bytes(JNIEnv *env,  jobject thiz,
        jbyteArray javaBytes, jint byteOffset, jint sizeInBytes,
        jint javaAudioFormat, jboolean isWriteBlocking) {
    //ALOGV("android_media_AudioTrack_write_native_bytes(offset=%d, sizeInBytes=%d) called",
    //    offsetInBytes, sizeInBytes);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
                "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    ScopedBytesRO bytes(env, javaBytes);
    if (bytes.get() == NULL) {
        ALOGE("Error retrieving source of audio data to play, can't play");
        return AUDIOTRACK_ERROR_BAD_VALUE;
    }

    jint written = writeToTrack(lpTrack, javaAudioFormat, bytes.get(), byteOffset,
            sizeInBytes, isWriteBlocking == JNI_TRUE /* blocking */);

    return written;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_short(JNIEnv *env,  jobject thiz,
                                                  jshortArray javaAudioData,
                                                  jint offsetInShorts, jint sizeInShorts,
                                                  jint javaAudioFormat) {

    //ALOGV("android_media_AudioTrack_write_short(offset=%d, sizeInShorts=%d) called",
    //    offsetInShorts, sizeInShorts);
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    // get the pointer for the audio data from the java array
    // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such
    // a way that it becomes much more efficient. When doing so, we will have to prevent the
    // AudioSystem callback from being called while in a critical section (in case of a media
    // server process crash, for instance)
    jshort* cAudioData = NULL;
    if (javaAudioData) {
        cAudioData = (jshort *)env->GetShortArrayElements(javaAudioData, NULL);
        if (cAudioData == NULL) {
            ALOGE("Error retrieving source of audio data to play, can't play");
            return 0; // out of memory or no data to load
        }
    } else {
        ALOGE("NULL java array of audio data to play, can't play");
        return 0;
    }
    jint written = writeToTrack(lpTrack, javaAudioFormat, (jbyte *)cAudioData,
                                offsetInShorts * sizeof(short), sizeInShorts * sizeof(short),
            true /*blocking write, legacy behavior*/);
    env->ReleaseShortArrayElements(javaAudioData, cAudioData, 0);

    if (written > 0) {
        written /= sizeof(short);
    }
    //ALOGV("write wrote %d (tried %d) shorts in the native AudioTrack with offset %d",
    //     (int)written, (int)(sizeInShorts), (int)offsetInShorts);

    return written;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_write_float(JNIEnv *env,  jobject thiz,
                                                  jfloatArray javaAudioData,
                                                  jint offsetInFloats, jint sizeInFloats,
                                                  jint javaAudioFormat,
                                                  jboolean isWriteBlocking) {

    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for write()");
        return 0;
    }

    jfloat* cAudioData = NULL;
    if (javaAudioData) {
        cAudioData = (jfloat *)env->GetFloatArrayElements(javaAudioData, NULL);
        if (cAudioData == NULL) {
            ALOGE("Error retrieving source of audio data to play, can't play");
            return 0; // out of memory or no data to load
        }
    } else {
        ALOGE("NULL java array of audio data to play, can't play");
        return 0;
    }
    jint written = writeToTrack(lpTrack, javaAudioFormat, (jbyte *)cAudioData,
                                offsetInFloats * sizeof(float), sizeInFloats * sizeof(float),
                                isWriteBlocking == JNI_TRUE /* blocking */);
    env->ReleaseFloatArrayElements(javaAudioData, cAudioData, 0);

    if (written > 0) {
        written /= sizeof(float);
    }

    return written;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_native_frame_count(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for frameCount()");
        return AUDIOTRACK_ERROR;
    }

    return lpTrack->frameCount();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_playback_rate(JNIEnv *env,  jobject thiz,
        jint sampleRateInHz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setSampleRate()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode(lpTrack->setSampleRate(sampleRateInHz));
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_playback_rate(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getSampleRate()");
        return AUDIOTRACK_ERROR;
    }
    return (jint) lpTrack->getSampleRate();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env,  jobject thiz,
        jint markerPos) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setMarkerPosition()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setMarkerPosition(markerPos) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_marker_pos(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t markerPos = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getMarkerPosition()");
        return AUDIOTRACK_ERROR;
    }
    lpTrack->getMarkerPosition(&markerPos);
    return (jint)markerPos;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_pos_update_period(JNIEnv *env,  jobject thiz,
        jint period) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setPositionUpdatePeriod()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setPositionUpdatePeriod(period) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_pos_update_period(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t period = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getPositionUpdatePeriod()");
        return AUDIOTRACK_ERROR;
    }
    lpTrack->getPositionUpdatePeriod(&period);
    return (jint)period;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_position(JNIEnv *env,  jobject thiz,
        jint position) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setPosition()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setPosition(position) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_position(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    uint32_t position = 0;

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for getPosition()");
        return AUDIOTRACK_ERROR;
    }
    lpTrack->getPosition(&position);
    return (jint)position;
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_latency(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for latency()");
        return AUDIOTRACK_ERROR;
    }
    return (jint)lpTrack->latency();
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_timestamp(JNIEnv *env,  jobject thiz, jlongArray jTimestamp) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);

    if (lpTrack == NULL) {
        ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()");
        return AUDIOTRACK_ERROR;
    }
    AudioTimestamp timestamp;
    status_t status = lpTrack->getTimestamp(timestamp);
    if (status == OK) {
        jlong* nTimestamp = (jlong *) env->GetPrimitiveArrayCritical(jTimestamp, NULL);
        if (nTimestamp == NULL) {
            ALOGE("Unable to get array for getTimestamp()");
            return AUDIOTRACK_ERROR;
        }
        nTimestamp[0] = (jlong) timestamp.mPosition;
        nTimestamp[1] = (jlong) ((timestamp.mTime.tv_sec * 1000000000LL) + timestamp.mTime.tv_nsec);
        env->ReleasePrimitiveArrayCritical(jTimestamp, nTimestamp, 0);
    }
    return (jint) android_media_translateErrorCode(status);
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_set_loop(JNIEnv *env,  jobject thiz,
        jint loopStart, jint loopEnd, jint loopCount) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setLoop()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->setLoop(loopStart, loopEnd, loopCount) );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_reload(JNIEnv *env,  jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for reload()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->reload() );
}


// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env,  jobject thiz,
        jint javaStreamType) {
    uint32_t afSamplingRate;
    // convert the stream type from Java to native value
    // FIXME: code duplication with android_media_AudioTrack_setup()
    audio_stream_type_t nativeStreamType;
    switch (javaStreamType) {
    case AUDIO_STREAM_VOICE_CALL:
    case AUDIO_STREAM_SYSTEM:
    case AUDIO_STREAM_RING:
    case AUDIO_STREAM_MUSIC:
    case AUDIO_STREAM_ALARM:
    case AUDIO_STREAM_NOTIFICATION:
    case AUDIO_STREAM_BLUETOOTH_SCO:
    case AUDIO_STREAM_DTMF:
        nativeStreamType = (audio_stream_type_t) javaStreamType;
        break;
    default:
        nativeStreamType = AUDIO_STREAM_DEFAULT;
        break;
    }

    status_t status = AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType);
    if (status != NO_ERROR) {
        ALOGE("Error %d in AudioSystem::getOutputSamplingRate() for stream type %d "
              "in AudioTrack JNI", status, nativeStreamType);
        return DEFAULT_OUTPUT_SAMPLE_RATE;
    } else {
        return afSamplingRate;
    }
}


// ----------------------------------------------------------------------------
// returns the minimum required size for the successful creation of a streaming AudioTrack
// returns -1 if there was an error querying the hardware.
static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env,  jobject thiz,
    jint sampleRateInHertz, jint channelCount, jint audioFormat) {

    size_t frameCount;
    const status_t status = AudioTrack::getMinFrameCount(&frameCount, AUDIO_STREAM_DEFAULT,
            sampleRateInHertz);
    if (status != NO_ERROR) {
        ALOGE("AudioTrack::getMinFrameCount() for sample rate %d failed with status %d",
                sampleRateInHertz, status);
        return -1;
    }
    const audio_format_t format = audioFormatToNative(audioFormat);
    const size_t bytesPerSample = audio_bytes_per_sample(format);
    return frameCount * channelCount * bytesPerSample;
}

// ----------------------------------------------------------------------------
static jint
android_media_AudioTrack_setAuxEffectSendLevel(JNIEnv *env, jobject thiz, jfloat level )
{
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL ) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for setAuxEffectSendLevel()");
        return -1;
    }

    status_t status = lpTrack->setAuxEffectSendLevel(level);
    if (status != NO_ERROR) {
        ALOGE("AudioTrack::setAuxEffectSendLevel() for level %g failed with status %d",
                level, status);
    }
    return (jint) status;
}

// ----------------------------------------------------------------------------
static jint android_media_AudioTrack_attachAuxEffect(JNIEnv *env,  jobject thiz,
        jint effectId) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == NULL) {
        jniThrowException(env, "java/lang/IllegalStateException",
            "Unable to retrieve AudioTrack pointer for attachAuxEffect()");
        return AUDIOTRACK_ERROR;
    }
    return android_media_translateErrorCode( lpTrack->attachAuxEffect(effectId) );
}

// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
static JNINativeMethod gMethods[] = {
    // name,              signature,     funcPtr
    {"native_start",         "()V",      (void *)android_media_AudioTrack_start},
    {"native_stop",          "()V",      (void *)android_media_AudioTrack_stop},
    {"native_pause",         "()V",      (void *)android_media_AudioTrack_pause},
    {"native_flush",         "()V",      (void *)android_media_AudioTrack_flush},
    {"native_setup",         "(Ljava/lang/Object;IIIIII[I)I",
                                         (void *)android_media_AudioTrack_setup},
    {"native_finalize",      "()V",      (void *)android_media_AudioTrack_finalize},
    {"native_release",       "()V",      (void *)android_media_AudioTrack_release},
    {"native_write_byte",    "([BIIIZ)I",(void *)android_media_AudioTrack_write_byte},
    {"native_write_native_bytes",
                             "(Ljava/lang/Object;IIIZ)I",
                                         (void *)android_media_AudioTrack_write_native_bytes},
    {"native_write_short",   "([SIII)I", (void *)android_media_AudioTrack_write_short},
    {"native_write_float",   "([FIIIZ)I",(void *)android_media_AudioTrack_write_float},
    {"native_setVolume",     "(FF)V",    (void *)android_media_AudioTrack_set_volume},
    {"native_get_native_frame_count",
                             "()I",      (void *)android_media_AudioTrack_get_native_frame_count},
    {"native_set_playback_rate",
                             "(I)I",     (void *)android_media_AudioTrack_set_playback_rate},
    {"native_get_playback_rate",
                             "()I",      (void *)android_media_AudioTrack_get_playback_rate},
    {"native_set_marker_pos","(I)I",     (void *)android_media_AudioTrack_set_marker_pos},
    {"native_get_marker_pos","()I",      (void *)android_media_AudioTrack_get_marker_pos},
    {"native_set_pos_update_period",
                             "(I)I",     (void *)android_media_AudioTrack_set_pos_update_period},
    {"native_get_pos_update_period",
                             "()I",      (void *)android_media_AudioTrack_get_pos_update_period},
    {"native_set_position",  "(I)I",     (void *)android_media_AudioTrack_set_position},
    {"native_get_position",  "()I",      (void *)android_media_AudioTrack_get_position},
    {"native_get_latency",   "()I",      (void *)android_media_AudioTrack_get_latency},
    {"native_get_timestamp", "([J)I",    (void *)android_media_AudioTrack_get_timestamp},
    {"native_set_loop",      "(III)I",   (void *)android_media_AudioTrack_set_loop},
    {"native_reload_static", "()I",      (void *)android_media_AudioTrack_reload},
    {"native_get_output_sample_rate",
                             "(I)I",      (void *)android_media_AudioTrack_get_output_sample_rate},
    {"native_get_min_buff_size",
                             "(III)I",   (void *)android_media_AudioTrack_get_min_buff_size},
    {"native_setAuxEffectSendLevel",
                             "(F)I",     (void *)android_media_AudioTrack_setAuxEffectSendLevel},
    {"native_attachAuxEffect",
                             "(I)I",     (void *)android_media_AudioTrack_attachAuxEffect},
};


// field names found in android/media/AudioTrack.java
#define JAVA_POSTEVENT_CALLBACK_NAME                    "postEventFromNative"
#define JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME            "mNativeTrackInJavaObj"
#define JAVA_JNIDATA_FIELD_NAME                         "mJniData"

// ----------------------------------------------------------------------------
// preconditions:
//    theClass is valid
bool android_media_getIntConstantFromClass(JNIEnv* pEnv, jclass theClass, const char* className,
                             const char* constName, int* constVal) {
    jfieldID javaConst = NULL;
    javaConst = pEnv->GetStaticFieldID(theClass, constName, "I");
    if (javaConst != NULL) {
        *constVal = pEnv->GetStaticIntField(theClass, javaConst);
        return true;
    } else {
        ALOGE("Can't find %s.%s", className, constName);
        return false;
    }
}


// ----------------------------------------------------------------------------
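// Register the native methods declared in gMethods with android.media.AudioTrack and cache the
// Java method and field IDs used throughout this file.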
int register_android_media_AudioTrack(JNIEnv *env)
{
    javaAudioTrackFields.nativeTrackInJavaObj = NULL;
    javaAudioTrackFields.postNativeEventInJava = NULL;

    // Get the AudioTrack class
    jclass audioTrackClass = env->FindClass(kClassPathName);
    if (audioTrackClass == NULL) {
        ALOGE("Can't find %s", kClassPathName);
        return -1;
    }

    // Get the postEvent method
    javaAudioTrackFields.postNativeEventInJava = env->GetStaticMethodID(
            audioTrackClass,
            JAVA_POSTEVENT_CALLBACK_NAME, "(Ljava/lang/Object;IIILjava/lang/Object;)V");
    if (javaAudioTrackFields.postNativeEventInJava == NULL) {
        ALOGE("Can't find AudioTrack.%s", JAVA_POSTEVENT_CALLBACK_NAME);
        return -1;
    }

    // Get the instance fields
    //      nativeTrackInJavaObj
    javaAudioTrackFields.nativeTrackInJavaObj = env->GetFieldID(
            audioTrackClass,
            JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME, "J");
    if (javaAudioTrackFields.nativeTrackInJavaObj == NULL) {
        ALOGE("Can't find AudioTrack.%s", JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME);
        return -1;
    }
    //      jniData;
    javaAudioTrackFields.jniData = env->GetFieldID(
            audioTrackClass,
            JAVA_JNIDATA_FIELD_NAME, "J");
    if (javaAudioTrackFields.jniData == NULL) {
        ALOGE("Can't find AudioTrack.%s", JAVA_JNIDATA_FIELD_NAME);
        return -1;
    }

    return AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods));
}


// ----------------------------------------------------------------------------
