RecognizerIntent.java revision 482c9bb390f1dae8e55806431a960f9df4d2f0b8
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.speech;

import java.util.ArrayList;

import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;

/**
 * Constants for supporting speech recognition through starting an {@link Intent}
 */
public class RecognizerIntent {
    /**
     * The extra key used in an intent to the speech recognizer for voice search. Not
     * generally to be used by developers. The system search dialog uses this, for example,
     * to set a calling package for identification by a voice search API. If this extra
     * is set by anyone but the system process, it should be overridden by the voice search
     * implementation.
     */
    public final static String EXTRA_CALLING_PACKAGE = "calling_package";

    private RecognizerIntent() {
        // Not for instantiating.
    }

    /**
     * Starts an activity that will prompt the user for speech and send it through a
     * speech recognizer.  The results will be returned via activity results (in
     * {@link Activity#onActivityResult}, if you start the intent using
     * {@link Activity#startActivityForResult(Intent, int)}), or forwarded via a PendingIntent
     * if one is provided.
     *
     * <p>Starting this intent with just {@link Activity#startActivity(Intent)} is not supported.
     * You must either use {@link Activity#startActivityForResult(Intent, int)}, or provide a
     * PendingIntent, to receive recognition results.
     *
     * <p>Required extras:
     * <ul>
     *   <li>{@link #EXTRA_LANGUAGE_MODEL}
     * </ul>
     *
     * <p>Optional extras:
     * <ul>
     *   <li>{@link #EXTRA_PROMPT}
     *   <li>{@link #EXTRA_LANGUAGE}
     *   <li>{@link #EXTRA_MAX_RESULTS}
     *   <li>{@link #EXTRA_RESULTS_PENDINGINTENT}
     *   <li>{@link #EXTRA_RESULTS_PENDINGINTENT_BUNDLE}
     * </ul>
     *
     * <p>Result extras (returned in the result, not to be specified in the request):
     * <ul>
     *   <li>{@link #EXTRA_RESULTS}
     * </ul>
     *
     * <p>NOTE: There may not be any applications installed to handle this action, so you should
     * make sure to catch {@link ActivityNotFoundException}.
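     *
     * <p>For example, a minimal sketch of launching recognition from an {@link Activity} and
     * reading the results. The request code, prompt text, and variable names below are
     * arbitrary illustration values, not part of this API:
     * <pre>{@code
     * Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
     * intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
     *         RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
     * intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak now");
     * try {
     *     startActivityForResult(intent, VOICE_REQUEST_CODE);
     * } catch (ActivityNotFoundException e) {
     *     // No activity is installed that can handle the intent.
     * }
     *
     * // Later, in onActivityResult(int requestCode, int resultCode, Intent data):
     * if (requestCode == VOICE_REQUEST_CODE && resultCode == RESULT_OK) {
     *     ArrayList<String> matches =
     *             data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
     *     // matches is ordered by descending recognizer confidence.
     * }
     * }</pre>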
     */
    public static final String ACTION_RECOGNIZE_SPEECH = "android.speech.action.RECOGNIZE_SPEECH";

    /**
     * Starts an activity that will prompt the user for speech, send it through a
     * speech recognizer, and either display a web search result or trigger
     * another type of action based on the user's speech.
     *
     * <p>If you want to avoid triggering any type of action besides web search, you can use
     * the {@link #EXTRA_WEB_SEARCH_ONLY} extra.
     *
     * <p>Required extras:
     * <ul>
     *   <li>{@link #EXTRA_LANGUAGE_MODEL}
     * </ul>
     *
     * <p>Optional extras:
     * <ul>
     *   <li>{@link #EXTRA_PROMPT}
     *   <li>{@link #EXTRA_LANGUAGE}
     *   <li>{@link #EXTRA_MAX_RESULTS}
     *   <li>{@link #EXTRA_PARTIAL_RESULTS}
     *   <li>{@link #EXTRA_WEB_SEARCH_ONLY}
     *   <li>{@link #EXTRA_ORIGIN}
     * </ul>
     *
     * <p>Result extras (returned in the result, not to be specified in the request):
     * <ul>
     *   <li>{@link #EXTRA_RESULTS}
     *   <li>{@link #EXTRA_CONFIDENCE_SCORES} (optional)
     * </ul>
     *
     * <p>NOTE: There may not be any applications installed to handle this action, so you should
     * make sure to catch {@link ActivityNotFoundException}.
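     *
     * <p>For example, a minimal sketch of launching a voice web search from an {@link Activity}.
     * Whether the result is displayed or another action is triggered is up to the voice search
     * implementation:
     * <pre>{@code
     * Intent intent = new Intent(RecognizerIntent.ACTION_WEB_SEARCH);
     * intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
     *         RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
     * intent.putExtra(RecognizerIntent.EXTRA_WEB_SEARCH_ONLY, true);  // restrict to web search
     * try {
     *     startActivity(intent);
     * } catch (ActivityNotFoundException e) {
     *     // No voice search activity is installed.
     * }
     * }</pre>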
     */
    public static final String ACTION_WEB_SEARCH = "android.speech.action.WEB_SEARCH";

    /**
     * Starts an activity that will prompt the user for speech without requiring the user's
     * visual attention or touch input. The speech is sent through a speech recognizer, and
     * either speech is synthesized for a web search result or another type of action is
     * triggered based on the user's speech.
     *
     * This activity may be launched while the device is locked in a secure mode.
     * Special care must be taken to ensure that the voice actions that are performed while
     * hands free cannot compromise the device's security.
     * The activity should check the value of the {@link #EXTRA_SECURE} extra to determine
     * whether the device has been securely locked. If so, the activity should either restrict
     * the set of voice actions that are permitted or require some form of secure
     * authentication before proceeding.
     *
     * To ensure that the activity's user interface is visible while the lock screen is showing,
     * the activity should set the
     * {@link android.view.WindowManager.LayoutParams#FLAG_SHOW_WHEN_LOCKED} window flag.
     * Otherwise the activity's user interface may be hidden by the lock screen. The activity
     * should take care not to leak private information when the device is securely locked.
     *
     * <p>Optional extras:
     * <ul>
     *   <li>{@link #EXTRA_SECURE}
     * </ul>
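     *
     * <p>A sketch of how an implementing activity might handle the securely-locked case (error
     * handling and the surrounding activity code are omitted):
     * <pre>{@code
     * // In the Activity that handles ACTION_VOICE_SEARCH_HANDS_FREE:
     * getWindow().addFlags(WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED);
     * boolean secure = getIntent().getBooleanExtra(RecognizerIntent.EXTRA_SECURE, false);
     * if (secure) {
     *     // Restrict the permitted voice actions or require authentication first.
     * }
     * }</pre>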
     */
    public static final String ACTION_VOICE_SEARCH_HANDS_FREE =
            "android.speech.action.VOICE_SEARCH_HANDS_FREE";

    /**
     * Optional boolean to indicate that a "hands free" voice search was performed while the device
     * was in a secure mode. An example of secure mode is when the device's screen lock is active,
     * and it requires some form of authentication to be unlocked.
     *
     * When the device is securely locked, the voice search activity should either restrict
     * the set of voice actions that are permitted, or require some form of secure authentication
     * before proceeding.
     */
    public static final String EXTRA_SECURE = "android.speech.extras.EXTRA_SECURE";

    /**
     * The minimum length of an utterance. We will not stop recording before this amount of time.
     *
     * Note that it is extremely rare you'd want to specify this value in an intent. If you don't
     * have a very good reason to change these, you should leave them as they are. Note also that
     * certain values may cause undesired or unexpected results - use judiciously! Additionally,
     * depending on the recognizer implementation, these values may have no effect.
     */
    public static final String EXTRA_SPEECH_INPUT_MINIMUM_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_MINIMUM_LENGTH_MILLIS";

    /**
     * The amount of time that it should take after we stop hearing speech to consider the input
     * complete.
     *
     * Note that it is extremely rare you'd want to specify this value in an intent. If
     * you don't have a very good reason to change these, you should leave them as they are. Note
     * also that certain values may cause undesired or unexpected results - use judiciously!
     * Additionally, depending on the recognizer implementation, these values may have no effect.
     */
    public static final String EXTRA_SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_COMPLETE_SILENCE_LENGTH_MILLIS";

    /**
     * The amount of time that it should take after we stop hearing speech to consider the input
     * possibly complete. This is used to prevent the endpointer from cutting off during very
     * short mid-speech pauses.
     *
     * Note that it is extremely rare you'd want to specify this value in an intent. If
     * you don't have a very good reason to change these, you should leave them as they are. Note
     * also that certain values may cause undesired or unexpected results - use judiciously!
     * Additionally, depending on the recognizer implementation, these values may have no effect.
     */
    public static final String EXTRA_SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS =
            "android.speech.extras.SPEECH_INPUT_POSSIBLY_COMPLETE_SILENCE_LENGTH_MILLIS";

    /**
     * Informs the recognizer which speech model to prefer when performing
     * {@link #ACTION_RECOGNIZE_SPEECH}. The recognizer uses this
     * information to fine-tune the results. This extra is required. Activities implementing
     * {@link #ACTION_RECOGNIZE_SPEECH} may interpret the values as they see fit.
     *
     *  @see #LANGUAGE_MODEL_FREE_FORM
     *  @see #LANGUAGE_MODEL_WEB_SEARCH
     */
    public static final String EXTRA_LANGUAGE_MODEL = "android.speech.extra.LANGUAGE_MODEL";

    /**
     * Use a language model based on free-form speech recognition.  This is a value to use for
     * {@link #EXTRA_LANGUAGE_MODEL}.
     * @see #EXTRA_LANGUAGE_MODEL
     */
    public static final String LANGUAGE_MODEL_FREE_FORM = "free_form";
    /**
     * Use a language model based on web search terms.  This is a value to use for
     * {@link #EXTRA_LANGUAGE_MODEL}.
     * @see #EXTRA_LANGUAGE_MODEL
     */
    public static final String LANGUAGE_MODEL_WEB_SEARCH = "web_search";

    /** Optional text prompt to show to the user when asking them to speak. */
    public static final String EXTRA_PROMPT = "android.speech.extra.PROMPT";

    /**
     * Optional IETF language tag (as defined by BCP 47), for example "en-US". This tag informs the
     * recognizer to perform speech recognition in a language different from the current default
     * locale ({@link java.util.Locale#getDefault()}).
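     *
     * <p>For example, to request recognition in German regardless of the device's default locale
     * (the language tag here is just an illustration):
     * <pre>{@code
     * intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "de-DE");
     * }</pre>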
     */
    public static final String EXTRA_LANGUAGE = "android.speech.extra.LANGUAGE";

    /**
     * Optional value which can be used to indicate the referrer URL of the page in which
     * speech was requested. For example, a web browser may choose to provide this for
     * uses of speech on a given page.
     */
    public static final String EXTRA_ORIGIN = "android.speech.extra.ORIGIN";

    /**
     * Optional limit on the maximum number of results to return. If omitted, the recognizer
     * will choose how many results to return. Must be an integer.
     */
    public static final String EXTRA_MAX_RESULTS = "android.speech.extra.MAX_RESULTS";

    /**
     * Optional boolean, to be used with {@link #ACTION_WEB_SEARCH}, to indicate whether to
     * only fire web searches in response to a user's speech. The default is false, meaning
     * that other types of actions can be taken based on the user's speech.
     */
    public static final String EXTRA_WEB_SEARCH_ONLY = "android.speech.extra.WEB_SEARCH_ONLY";

    /**
     * Optional boolean to indicate whether partial results should be returned by the recognizer
     * as the user speaks (default is false).  The server may ignore a request for partial
     * results in some or all cases.
     */
    public static final String EXTRA_PARTIAL_RESULTS = "android.speech.extra.PARTIAL_RESULTS";

    /**
     * When the intent is {@link #ACTION_RECOGNIZE_SPEECH}, the speech input activity will
     * return results to you via the activity results mechanism.  Alternatively, if you use this
     * extra to supply a PendingIntent, the results will be added to its bundle and the
     * PendingIntent will be sent to its target.
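     *
     * <p>A sketch of supplying a forwarding PendingIntent from an {@link Activity}. The
     * {@code ResultsActivity} class and the flag value of 0 are illustrative only; recent
     * platform versions additionally require an explicit mutability flag:
     * <pre>{@code
     * Intent target = new Intent(this, ResultsActivity.class);
     * PendingIntent pending = PendingIntent.getActivity(this, 0, target, 0);
     *
     * Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
     * intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
     *         RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
     * intent.putExtra(RecognizerIntent.EXTRA_RESULTS_PENDINGINTENT, pending);
     * startActivity(intent);
     * }</pre>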
     */
    public static final String EXTRA_RESULTS_PENDINGINTENT =
            "android.speech.extra.RESULTS_PENDINGINTENT";

    /**
     * If you use {@link #EXTRA_RESULTS_PENDINGINTENT} to supply a forwarding intent, you can
     * also use this extra to supply additional extras for the final intent.  The search results
     * will be added to this bundle, and the combined bundle will be sent to the target.
     */
    public static final String EXTRA_RESULTS_PENDINGINTENT_BUNDLE =
            "android.speech.extra.RESULTS_PENDINGINTENT_BUNDLE";

    /** Result code returned when no matches are found for the given speech */
    public static final int RESULT_NO_MATCH = Activity.RESULT_FIRST_USER;
    /** Result code returned when there is a generic client error */
    public static final int RESULT_CLIENT_ERROR = Activity.RESULT_FIRST_USER + 1;
    /** Result code returned when the recognition server returns an error */
    public static final int RESULT_SERVER_ERROR = Activity.RESULT_FIRST_USER + 2;
    /** Result code returned when a network error was encountered */
    public static final int RESULT_NETWORK_ERROR = Activity.RESULT_FIRST_USER + 3;
    /** Result code returned when an audio error was encountered */
    public static final int RESULT_AUDIO_ERROR = Activity.RESULT_FIRST_USER + 4;

    /**
     * An ArrayList&lt;String&gt; of the recognition results when performing
     * {@link #ACTION_RECOGNIZE_SPEECH}. Generally this list should be ordered in
     * descending order of speech recognizer confidence. (See {@link #EXTRA_CONFIDENCE_SCORES}).
     * Returned in the results; not to be specified in the recognition request. Only present
     * when {@link Activity#RESULT_OK} is returned in an activity result. In a PendingIntent,
     * the lack of this extra indicates failure.
     */
    public static final String EXTRA_RESULTS = "android.speech.extra.RESULTS";

    /**
     * A float array of confidence scores of the recognition results when performing
     * {@link #ACTION_RECOGNIZE_SPEECH}. The array should be the same size as the ArrayList
     * returned in {@link #EXTRA_RESULTS}, and should contain values ranging from 0.0 to 1.0,
     * or -1 to represent an unavailable confidence score.
     * <p>
     * Confidence values close to 1.0 indicate high confidence (the speech recognizer is
     * confident that the recognition result is correct), while values close to 0.0 indicate
     * low confidence.
     * <p>
     * Returned in the results; not to be specified in the recognition request. This extra is
     * optional and might not be provided. Only present when {@link Activity#RESULT_OK} is
     * returned in an activity result.
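     * <p>
     * A sketch of reading the scores alongside {@link #EXTRA_RESULTS} in
     * {@link Activity#onActivityResult}, where {@code data} is the returned intent:
     * <pre>{@code
     * ArrayList<String> results =
     *         data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
     * float[] scores = data.getFloatArrayExtra(RecognizerIntent.EXTRA_CONFIDENCE_SCORES);
     * for (int i = 0; results != null && i < results.size(); i++) {
     *     float score = (scores != null) ? scores[i] : -1f;  // -1 means unavailable
     *     // Use results.get(i) and score.
     * }
     * }</pre>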
     */
    public static final String EXTRA_CONFIDENCE_SCORES = "android.speech.extra.CONFIDENCE_SCORES";

    /**
     * Returns the broadcast intent to fire with
     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, Bundle)}
     * to receive details from the package that implements voice search.
     * <p>
     * This is based on the value specified by the voice search {@link Activity} in
     * {@link #DETAILS_META_DATA}; if that value is not specified, this method returns null. It also
     * returns null if there is no chosen default activity to resolve {@link #ACTION_WEB_SEARCH}.
     * <p>
     * If an intent is returned and is fired, a {@link Bundle} of extras will be returned to the
     * provided result receiver, and should ideally contain values for
     * {@link #EXTRA_LANGUAGE_PREFERENCE} and {@link #EXTRA_SUPPORTED_LANGUAGES}.
     * <p>
     * (Whether these are actually provided is up to the particular implementation. It is
     * recommended that {@link Activity}s implementing {@link #ACTION_WEB_SEARCH} provide this
     * information, but it is not required.)
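     * <p>
     * A sketch of firing the returned intent and reading the reply, assuming {@code context} is
     * a valid {@link Context}; whether the extras are populated depends on the voice search
     * implementation:
     * <pre>{@code
     * Intent details = RecognizerIntent.getVoiceDetailsIntent(context);
     * if (details != null) {
     *     context.sendOrderedBroadcast(details, null, new BroadcastReceiver() {
     *         public void onReceive(Context context, Intent intent) {
     *             Bundle extras = getResultExtras(false);
     *             if (extras == null) return;
     *             String preferred =
     *                     extras.getString(RecognizerIntent.EXTRA_LANGUAGE_PREFERENCE);
     *             ArrayList<String> supported =
     *                     extras.getStringArrayList(RecognizerIntent.EXTRA_SUPPORTED_LANGUAGES);
     *         }
     *     }, null, Activity.RESULT_OK, null, null);
     * }
     * }</pre>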
     *
     * @param context a context object
     * @return the broadcast intent to fire or null if not available
     */
    public static final Intent getVoiceDetailsIntent(Context context) {
        Intent voiceSearchIntent = new Intent(ACTION_WEB_SEARCH);
        ResolveInfo ri = context.getPackageManager().resolveActivity(
                voiceSearchIntent, PackageManager.GET_META_DATA);
        if (ri == null || ri.activityInfo == null || ri.activityInfo.metaData == null) return null;

        String className = ri.activityInfo.metaData.getString(DETAILS_META_DATA);
        if (className == null) return null;

        Intent detailsIntent = new Intent(ACTION_GET_LANGUAGE_DETAILS);
        detailsIntent.setComponent(new ComponentName(ri.activityInfo.packageName, className));
        return detailsIntent;
    }

    /**
     * Meta-data name that an {@link Activity} implementing {@link #ACTION_WEB_SEARCH} can
     * use to expose the class name of a {@link BroadcastReceiver} which can respond to requests
     * for more information from any of the broadcast intents specified in this class.
     * <p>
     * Broadcast intents can be directed to the class name specified in the meta-data by creating
     * an {@link Intent}, setting the component with
     * {@link Intent#setComponent(android.content.ComponentName)}, and using
     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, android.os.Bundle)}
     * with another {@link BroadcastReceiver} which can receive the results.
     * <p>
     * The {@link #getVoiceDetailsIntent(Context)} method is provided as a convenience to create
     * a broadcast intent based on the value of this meta-data, if available.
     * <p>
     * This is optional and not all {@link Activity}s which implement {@link #ACTION_WEB_SEARCH}
     * are required to implement this. Thus retrieving this meta-data may return null.
     */
    public static final String DETAILS_META_DATA = "android.speech.DETAILS";

    /**
     * A broadcast intent which can be fired to the {@link BroadcastReceiver} component specified
     * in the {@link #DETAILS_META_DATA} meta-data of an
     * {@link Activity} satisfying {@link #ACTION_WEB_SEARCH}.
     * <p>
     * When fired with
     * {@link Context#sendOrderedBroadcast(Intent, String, BroadcastReceiver, android.os.Handler, int, String, android.os.Bundle)},
     * a {@link Bundle} of extras will be returned to the provided result receiver, and should
     * ideally contain values for {@link #EXTRA_LANGUAGE_PREFERENCE} and
     * {@link #EXTRA_SUPPORTED_LANGUAGES}.
     * <p>
     * (Whether these are actually provided is up to the particular implementation. It is
     * recommended that {@link Activity}s implementing {@link #ACTION_WEB_SEARCH} provide this
     * information, but it is not required.)
     */
    public static final String ACTION_GET_LANGUAGE_DETAILS =
            "android.speech.action.GET_LANGUAGE_DETAILS";

    /**
     * Specify this boolean extra in a broadcast of {@link #ACTION_GET_LANGUAGE_DETAILS} to
     * indicate that only the current language preference is needed in the response. This
     * avoids any additional computation if all you need is {@link #EXTRA_LANGUAGE_PREFERENCE}
     * in the response.
     */
    public static final String EXTRA_ONLY_RETURN_LANGUAGE_PREFERENCE =
            "android.speech.extra.ONLY_RETURN_LANGUAGE_PREFERENCE";

    /**
     * The key to the extra in the {@link Bundle} returned by {@link #ACTION_GET_LANGUAGE_DETAILS}
     * which is a {@link String} that represents the current language preference this user has
     * specified - a locale string like "en-US".
     */
    public static final String EXTRA_LANGUAGE_PREFERENCE =
            "android.speech.extra.LANGUAGE_PREFERENCE";

    /**
     * The key to the extra in the {@link Bundle} returned by {@link #ACTION_GET_LANGUAGE_DETAILS}
     * which is an {@link ArrayList} of {@link String}s that represents the languages supported by
     * this implementation of voice recognition - a list of strings like "en-US", "cmn-Hans-CN",
     * etc.
     */
    public static final String EXTRA_SUPPORTED_LANGUAGES =
            "android.speech.extra.SUPPORTED_LANGUAGES";
}