1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.net.rtp;
18
19import android.app.ActivityThread;
20import android.media.AudioManager;
21
22import java.util.HashMap;
23import java.util.Locale;
24import java.util.Map;
25
26/**
27 * An AudioGroup is an audio hub for the speaker, the microphone, and
28 * {@link AudioStream}s. Each of these components can be logically turned on
29 * or off by calling {@link #setMode(int)} or {@link RtpStream#setMode(int)}.
30 * The AudioGroup will go through these components and process them one by one
31 * within its execution loop. The loop consists of four steps. First, for each
32 * AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, decodes its incoming
33 * packets and stores in its buffer. Then, if the microphone is enabled,
34 * processes the recorded audio and stores in its buffer. Third, if the speaker
35 * is enabled, mixes all AudioStream buffers and plays back. Finally, for each
36 * AudioStream not in {@link RtpStream#MODE_RECEIVE_ONLY}, mixes all other
37 * buffers and sends back the encoded packets. An AudioGroup does nothing if
38 * there is no AudioStream in it.
39 *
40 * <p>Few things must be noticed before using these classes. The performance is
41 * highly related to the system load and the network bandwidth. Usually a
42 * simpler {@link AudioCodec} costs fewer CPU cycles but requires more network
 * bandwidth, and vice versa. Using two AudioStreams at the same time doubles
44 * not only the load but also the bandwidth. The condition varies from one
45 * device to another, and developers should choose the right combination in
46 * order to get the best result.</p>
47 *
48 * <p>It is sometimes useful to keep multiple AudioGroups at the same time. For
49 * example, a Voice over IP (VoIP) application might want to put a conference
50 * call on hold in order to make a new call but still allow people in the
 * conference call to keep talking to each other. This can be done easily using
52 * AudioGroups, but there are some limitations. Since the speaker and the
53 * microphone are globally shared resources, only one AudioGroup at a time is
54 * allowed to run in a mode other than {@link #MODE_ON_HOLD}. The others will
55 * be unable to acquire these resources and fail silently.</p>
56 *
57 * <p class="note">Using this class requires
58 * {@link android.Manifest.permission#RECORD_AUDIO} permission. Developers
59 * should set the audio mode to {@link AudioManager#MODE_IN_COMMUNICATION}
60 * using {@link AudioManager#setMode(int)} and change it back when none of
61 * the AudioGroups is in use.</p>
62 *
63 * @see AudioStream
64 */
65public class AudioGroup {
66    /**
67     * This mode is similar to {@link #MODE_NORMAL} except the speaker and
68     * the microphone are both disabled.
69     */
70    public static final int MODE_ON_HOLD = 0;
71
72    /**
73     * This mode is similar to {@link #MODE_NORMAL} except the microphone is
74     * disabled.
75     */
76    public static final int MODE_MUTED = 1;
77
78    /**
79     * This mode indicates that the speaker, the microphone, and all
80     * {@link AudioStream}s in the group are enabled. First, the packets
81     * received from the streams are decoded and mixed with the audio recorded
82     * from the microphone. Then, the results are played back to the speaker,
83     * encoded and sent back to each stream.
84     */
85    public static final int MODE_NORMAL = 2;
86
87    /**
88     * This mode is similar to {@link #MODE_NORMAL} except the echo suppression
89     * is enabled. It should be only used when the speaker phone is on.
90     */
91    public static final int MODE_ECHO_SUPPRESSION = 3;
92
93    private static final int MODE_LAST = 3;
94
95    private final Map<AudioStream, Long> mStreams;
96    private int mMode = MODE_ON_HOLD;
97
98    private long mNative;
99    static {
100        System.loadLibrary("rtp_jni");
101    }
102
103    /**
104     * Creates an empty AudioGroup.
105     */
106    public AudioGroup() {
107        mStreams = new HashMap<AudioStream, Long>();
108    }
109
110    /**
111     * Returns the {@link AudioStream}s in this group.
112     */
113    public AudioStream[] getStreams() {
114        synchronized (this) {
115            return mStreams.keySet().toArray(new AudioStream[mStreams.size()]);
116        }
117    }
118
119    /**
120     * Returns the current mode.
121     */
122    public int getMode() {
123        return mMode;
124    }
125
126    /**
127     * Changes the current mode. It must be one of {@link #MODE_ON_HOLD},
128     * {@link #MODE_MUTED}, {@link #MODE_NORMAL}, and
129     * {@link #MODE_ECHO_SUPPRESSION}.
130     *
131     * @param mode The mode to change to.
132     * @throws IllegalArgumentException if the mode is invalid.
133     */
134    public void setMode(int mode) {
135        if (mode < 0 || mode > MODE_LAST) {
136            throw new IllegalArgumentException("Invalid mode");
137        }
138        synchronized (this) {
139            nativeSetMode(mode);
140            mMode = mode;
141        }
142    }
143
144    private native void nativeSetMode(int mode);
145
146    // Package-private method used by AudioStream.join().
147    synchronized void add(AudioStream stream) {
148        if (!mStreams.containsKey(stream)) {
149            try {
150                AudioCodec codec = stream.getCodec();
151                String codecSpec = String.format(Locale.US, "%d %s %s", codec.type,
152                        codec.rtpmap, codec.fmtp);
153                long id = nativeAdd(stream.getMode(), stream.getSocket(),
154                        stream.getRemoteAddress().getHostAddress(),
155                        stream.getRemotePort(), codecSpec, stream.getDtmfType(),
156                        ActivityThread.currentOpPackageName());
157                mStreams.put(stream, id);
158            } catch (NullPointerException e) {
159                throw new IllegalStateException(e);
160            }
161        }
162    }
163
164    private native long nativeAdd(int mode, int socket, String remoteAddress,
165            int remotePort, String codecSpec, int dtmfType, String opPackageName);
166
167    // Package-private method used by AudioStream.join().
168    synchronized void remove(AudioStream stream) {
169        Long id = mStreams.remove(stream);
170        if (id != null) {
171            nativeRemove(id);
172        }
173    }
174
175    private native void nativeRemove(long id);
176
177    /**
178     * Sends a DTMF digit to every {@link AudioStream} in this group. Currently
179     * only event {@code 0} to {@code 15} are supported.
180     *
181     * @throws IllegalArgumentException if the event is invalid.
182     */
183    public void sendDtmf(int event) {
184        if (event < 0 || event > 15) {
185            throw new IllegalArgumentException("Invalid event");
186        }
187        synchronized (this) {
188            nativeSendDtmf(event);
189        }
190    }
191
192    private native void nativeSendDtmf(int event);
193
194    /**
195     * Removes every {@link AudioStream} in this group.
196     */
197    public void clear() {
198        for (AudioStream stream : getStreams()) {
199            stream.join(null);
200        }
201    }
202
203    @Override
204    protected void finalize() throws Throwable {
205        nativeRemove(0L);
206        super.finalize();
207    }
208}
209