AudioRecordInput.java revision a3f6a49ab37290eeeb8db0f41ec0f1cb74a68be7
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;
import android.os.Process;
import android.util.Log;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

import java.nio.ByteBuffer;

// Owned by its native counterpart declared in audio_record_input.h. Refer to
// that class for general comments.
//
// Lifecycle (all driven from the native side via @CalledByNative):
//   createAudioRecordInput() -> open() -> start() -> stop() -> close()
// start()/stop() may be repeated between a single open()/close() pair.
@JNINamespace("media")
class AudioRecordInput {
    private static final String TAG = "AudioRecordInput";
    // We are unable to obtain a precise measurement of the hardware delay on
    // Android. This is a conservative lower-bound based on measurements. It
    // could surely be tightened with further testing.
    private static final int HARDWARE_DELAY_MS = 100;

    // Opaque pointer to the owning native object; passed back on every JNI call.
    private final long mNativeAudioRecordInputStream;
    private final int mSampleRate;
    private final int mChannels;
    private final int mBitsPerSample;
    private final int mHardwareDelayBytes;
    private ByteBuffer mBuffer;
    private AudioRecord mAudioRecord;
    private AudioRecordThread mAudioRecordThread;

    // Dedicated thread that pumps audio from the AudioRecord into mBuffer and
    // notifies the native side. Created by start(), torn down by stop().
    private class AudioRecordThread extends Thread {
        // The "volatile" synchronization technique is discussed here:
        // http://stackoverflow.com/a/106787/299268
        // and more generally in this article:
        // https://www.ibm.com/developerworks/java/library/j-jtp06197/
        private volatile boolean mKeepAlive = true;

        @Override
        public void run() {
            try {
                Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO);
            } catch (IllegalArgumentException e) {
                Log.wtf(TAG, "setThreadPriority failed", e);
            } catch (SecurityException e) {
                Log.wtf(TAG, "setThreadPriority failed", e);
            }
            try {
                mAudioRecord.startRecording();
            } catch (IllegalStateException e) {
                Log.e(TAG, "startRecording failed", e);
                return;
            }

            while (mKeepAlive) {
                // Fills mBuffer in place; the native side already holds the
                // buffer's address (see nativeCacheDirectBufferAddress), so only
                // the byte count and delay estimate need to cross the JNI
                // boundary on each callback.
                int bytesRead = mAudioRecord.read(mBuffer, mBuffer.capacity());
                if (bytesRead > 0) {
                    nativeOnData(mNativeAudioRecordInputStream, bytesRead,
                                 mHardwareDelayBytes);
                } else {
                    // Covers both zero-length reads and negative AudioRecord
                    // error codes; we keep looping until stop() is requested.
                    Log.e(TAG, "read failed: " + bytesRead);
                }
            }

            try {
                mAudioRecord.stop();
            } catch (IllegalStateException e) {
                Log.e(TAG, "stop failed", e);
            }
        }

        // Signals the thread to exit and blocks until it has terminated.
        public void joinRecordThread() {
            mKeepAlive = false;
            boolean interrupted = false;
            while (isAlive()) {
                try {
                    join();
                } catch (InterruptedException e) {
                    // Keep waiting for the thread to die, but remember the
                    // interrupt so the caller's interrupt status can be
                    // restored once the join completes.
                    interrupted = true;
                }
            }
            if (interrupted) {
                Thread.currentThread().interrupt();
            }
        }
    }

    // Factory used by the native side; mirrors the private constructor.
    @CalledByNative
    private static AudioRecordInput createAudioRecordInput(long nativeAudioRecordInputStream,
            int sampleRate, int channels, int bitsPerSample, int bytesPerBuffer) {
        return new AudioRecordInput(nativeAudioRecordInputStream, sampleRate, channels,
                bitsPerSample, bytesPerBuffer);
    }

    private AudioRecordInput(long nativeAudioRecordInputStream, int sampleRate, int channels,
            int bitsPerSample, int bytesPerBuffer) {
        mNativeAudioRecordInputStream = nativeAudioRecordInputStream;
        mSampleRate = sampleRate;
        mChannels = channels;
        mBitsPerSample = bitsPerSample;
        // Convert the fixed delay estimate to bytes of audio data.
        // NOTE(review): this omits the channel count from the bytes-per-frame
        // factor, so the estimate is only exact for mono — confirm whether
        // stereo should use sampleRate * channels * bitsPerSample / 8.
        mHardwareDelayBytes = HARDWARE_DELAY_MS * sampleRate / 1000 * bitsPerSample / 8;

        // We use a direct buffer so that the native class can have access to
        // the underlying memory address. This avoids the need to copy from a
        // jbyteArray to native memory. More discussion of this here:
        // http://developer.android.com/training/articles/perf-jni.html
        try {
            mBuffer = ByteBuffer.allocateDirect(bytesPerBuffer);
        } catch (IllegalArgumentException e) {
            // NOTE(review): if execution continues past Log.wtf here, mBuffer
            // stays null and the cache call below passes null to native code —
            // confirm wtf terminates the process on all supported configs.
            Log.wtf(TAG, "allocateDirect failure", e);
        }
        // Rather than passing the ByteBuffer with every OnData call (requiring
        // the potentially expensive GetDirectBufferAddress) we simply have the
        // native class cache the address to the memory once.
        //
        // Unfortunately, profiling with traceview was unable to either confirm
        // or deny the advantage of this approach, as the values for
        // nativeOnData() were not stable across runs.
        nativeCacheDirectBufferAddress(mNativeAudioRecordInputStream, mBuffer);
    }

    // Creates the AudioRecord with the parameters supplied at construction.
    // Returns false (after logging) on any unsupported parameter or platform
    // failure; returns true on success.
    @CalledByNative
    private boolean open() {
        if (mAudioRecord != null) {
            Log.e(TAG, "open() called twice without a close()");
            return false;
        }
        int channelConfig;
        if (mChannels == 1) {
            channelConfig = AudioFormat.CHANNEL_IN_MONO;
        } else if (mChannels == 2) {
            channelConfig = AudioFormat.CHANNEL_IN_STEREO;
        } else {
            Log.e(TAG, "Unsupported number of channels: " + mChannels);
            return false;
        }

        int audioFormat;
        if (mBitsPerSample == 8) {
            audioFormat = AudioFormat.ENCODING_PCM_8BIT;
        } else if (mBitsPerSample == 16) {
            audioFormat = AudioFormat.ENCODING_PCM_16BIT;
        } else {
            Log.e(TAG, "Unsupported bits per sample: " + mBitsPerSample);
            return false;
        }

        // TODO(ajm): Do we need to make this larger to avoid underruns? The
        // Android documentation notes "this size doesn't guarantee a smooth
        // recording under load".
        int minBufferSize = AudioRecord.getMinBufferSize(mSampleRate, channelConfig, audioFormat);
        if (minBufferSize < 0) {
            // getMinBufferSize signals errors with negative return values.
            Log.e(TAG, "getMinBufferSize error: " + minBufferSize);
            return false;
        }

        // We will request mBuffer.capacity() with every read call. The
        // underlying AudioRecord buffer should be at least this large.
        int audioRecordBufferSizeInBytes = Math.max(mBuffer.capacity(), minBufferSize);
        try {
            // TODO(ajm): Allow other AudioSource types to be requested?
            mAudioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION,
                                           mSampleRate,
                                           channelConfig,
                                           audioFormat,
                                           audioRecordBufferSizeInBytes);
        } catch (IllegalArgumentException e) {
            Log.e(TAG, "AudioRecord failed", e);
            return false;
        }

        return true;
    }

    // Spawns the record thread. No-op (with a log) if open() has not been
    // called; silent no-op if already started.
    @CalledByNative
    private void start() {
        if (mAudioRecord == null) {
            Log.e(TAG, "start() called before open().");
            return;
        }
        if (mAudioRecordThread != null) {
            // start() was already called.
            return;
        }
        mAudioRecordThread = new AudioRecordThread();
        mAudioRecordThread.start();
    }

    // Stops and joins the record thread. Safe to call without a prior start().
    @CalledByNative
    private void stop() {
        if (mAudioRecordThread == null) {
            // start() was never called, or stop() was already called.
            return;
        }
        mAudioRecordThread.joinRecordThread();
        mAudioRecordThread = null;
    }

    // Releases the AudioRecord. Must be preceded by stop(); refuses (with a
    // log) to release while the record thread is still alive.
    @CalledByNative
    private void close() {
        if (mAudioRecordThread != null) {
            Log.e(TAG, "close() called before stop().");
            return;
        }
        if (mAudioRecord == null) {
            // open() was not called.
            return;
        }
        mAudioRecord.release();
        mAudioRecord = null;
    }

    private native void nativeCacheDirectBufferAddress(long nativeAudioRecordInputStream,
            ByteBuffer buffer);
    private native void nativeOnData(long nativeAudioRecordInputStream, int size,
            int hardwareDelayBytes);
}