1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17package android.net.rtp;
18
19import android.media.AudioManager;
20
21import java.util.HashMap;
22import java.util.Locale;
23import java.util.Map;
24
25/**
26 * An AudioGroup is an audio hub for the speaker, the microphone, and
27 * {@link AudioStream}s. Each of these components can be logically turned on
28 * or off by calling {@link #setMode(int)} or {@link RtpStream#setMode(int)}.
29 * The AudioGroup will go through these components and process them one by one
30 * within its execution loop. The loop consists of four steps. First, for each
31 * AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, decodes its incoming
32 * packets and stores in its buffer. Then, if the microphone is enabled,
33 * processes the recorded audio and stores in its buffer. Third, if the speaker
34 * is enabled, mixes all AudioStream buffers and plays back. Finally, for each
35 * AudioStream not in {@link RtpStream#MODE_RECEIVE_ONLY}, mixes all other
36 * buffers and sends back the encoded packets. An AudioGroup does nothing if
37 * there is no AudioStream in it.
38 *
 * <p>A few things must be noted before using these classes. The performance is
40 * highly related to the system load and the network bandwidth. Usually a
41 * simpler {@link AudioCodec} costs fewer CPU cycles but requires more network
 * bandwidth, and vice versa. Using two AudioStreams at the same time doubles
43 * not only the load but also the bandwidth. The condition varies from one
44 * device to another, and developers should choose the right combination in
45 * order to get the best result.</p>
46 *
47 * <p>It is sometimes useful to keep multiple AudioGroups at the same time. For
48 * example, a Voice over IP (VoIP) application might want to put a conference
 * call on hold in order to make a new call but still allow people in the
 * conference call to talk to each other. This can be done easily using two
51 * AudioGroups, but there are some limitations. Since the speaker and the
52 * microphone are globally shared resources, only one AudioGroup at a time is
53 * allowed to run in a mode other than {@link #MODE_ON_HOLD}. The others will
54 * be unable to acquire these resources and fail silently.</p>
55 *
56 * <p class="note">Using this class requires
57 * {@link android.Manifest.permission#RECORD_AUDIO} permission. Developers
58 * should set the audio mode to {@link AudioManager#MODE_IN_COMMUNICATION}
59 * using {@link AudioManager#setMode(int)} and change it back when none of
60 * the AudioGroups is in use.</p>
61 *
62 * @see AudioStream
63 */
64public class AudioGroup {
65    /**
66     * This mode is similar to {@link #MODE_NORMAL} except the speaker and
67     * the microphone are both disabled.
68     */
69    public static final int MODE_ON_HOLD = 0;
70
71    /**
72     * This mode is similar to {@link #MODE_NORMAL} except the microphone is
73     * disabled.
74     */
75    public static final int MODE_MUTED = 1;
76
77    /**
78     * This mode indicates that the speaker, the microphone, and all
79     * {@link AudioStream}s in the group are enabled. First, the packets
80     * received from the streams are decoded and mixed with the audio recorded
81     * from the microphone. Then, the results are played back to the speaker,
82     * encoded and sent back to each stream.
83     */
84    public static final int MODE_NORMAL = 2;
85
86    /**
87     * This mode is similar to {@link #MODE_NORMAL} except the echo suppression
88     * is enabled. It should be only used when the speaker phone is on.
89     */
90    public static final int MODE_ECHO_SUPPRESSION = 3;
91
92    private static final int MODE_LAST = 3;
93
94    private final Map<AudioStream, Long> mStreams;
95    private int mMode = MODE_ON_HOLD;
96
97    private long mNative;
98    static {
99        System.loadLibrary("rtp_jni");
100    }
101
102    /**
103     * Creates an empty AudioGroup.
104     */
105    public AudioGroup() {
106        mStreams = new HashMap<AudioStream, Long>();
107    }
108
109    /**
110     * Returns the {@link AudioStream}s in this group.
111     */
112    public AudioStream[] getStreams() {
113        synchronized (this) {
114            return mStreams.keySet().toArray(new AudioStream[mStreams.size()]);
115        }
116    }
117
118    /**
119     * Returns the current mode.
120     */
121    public int getMode() {
122        return mMode;
123    }
124
125    /**
126     * Changes the current mode. It must be one of {@link #MODE_ON_HOLD},
127     * {@link #MODE_MUTED}, {@link #MODE_NORMAL}, and
128     * {@link #MODE_ECHO_SUPPRESSION}.
129     *
130     * @param mode The mode to change to.
131     * @throws IllegalArgumentException if the mode is invalid.
132     */
133    public void setMode(int mode) {
134        if (mode < 0 || mode > MODE_LAST) {
135            throw new IllegalArgumentException("Invalid mode");
136        }
137        synchronized (this) {
138            nativeSetMode(mode);
139            mMode = mode;
140        }
141    }
142
143    private native void nativeSetMode(int mode);
144
145    // Package-private method used by AudioStream.join().
146    synchronized void add(AudioStream stream) {
147        if (!mStreams.containsKey(stream)) {
148            try {
149                AudioCodec codec = stream.getCodec();
150                String codecSpec = String.format(Locale.US, "%d %s %s", codec.type,
151                        codec.rtpmap, codec.fmtp);
152                long id = nativeAdd(stream.getMode(), stream.getSocket(),
153                        stream.getRemoteAddress().getHostAddress(),
154                        stream.getRemotePort(), codecSpec, stream.getDtmfType());
155                mStreams.put(stream, id);
156            } catch (NullPointerException e) {
157                throw new IllegalStateException(e);
158            }
159        }
160    }
161
162    private native long nativeAdd(int mode, int socket, String remoteAddress,
163            int remotePort, String codecSpec, int dtmfType);
164
165    // Package-private method used by AudioStream.join().
166    synchronized void remove(AudioStream stream) {
167        Long id = mStreams.remove(stream);
168        if (id != null) {
169            nativeRemove(id);
170        }
171    }
172
173    private native void nativeRemove(long id);
174
175    /**
176     * Sends a DTMF digit to every {@link AudioStream} in this group. Currently
177     * only event {@code 0} to {@code 15} are supported.
178     *
179     * @throws IllegalArgumentException if the event is invalid.
180     */
181    public void sendDtmf(int event) {
182        if (event < 0 || event > 15) {
183            throw new IllegalArgumentException("Invalid event");
184        }
185        synchronized (this) {
186            nativeSendDtmf(event);
187        }
188    }
189
190    private native void nativeSendDtmf(int event);
191
192    /**
193     * Removes every {@link AudioStream} in this group.
194     */
195    public void clear() {
196        for (AudioStream stream : getStreams()) {
197            stream.join(null);
198        }
199    }
200
201    @Override
202    protected void finalize() throws Throwable {
203        nativeRemove(0L);
204        super.finalize();
205    }
206}
207