accessibility_api_handler.js revision 1320f92c476a1ad9d19dba2a48c72b75566198e9
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/**
 * @fileoverview Accesses Chrome's accessibility extension API and gives
 * spoken feedback for events that happen in the "Chrome of Chrome".
 */

goog.provide('cvox.AccessibilityApiHandler');

goog.require('cvox.AbstractEarcons');
goog.require('cvox.AbstractTts');
goog.require('cvox.BrailleInterface');
goog.require('cvox.BrailleUtil');
goog.require('cvox.ChromeVoxEditableTextBase');
goog.require('cvox.NavBraille');


/**
 * The chrome.experimental.accessibility API is moving to
 * chrome.accessibilityPrivate, so provide an alias during the transition.
 *
 * TODO(dmazzoni): Remove after the stable version of Chrome no longer
 * has the experimental accessibility API.
 */
chrome.experimental = chrome.experimental || {};
/**
 * Fall back on the experimental API if the new name is not available.
 */
chrome.accessibilityPrivate = chrome.accessibilityPrivate ||
    chrome.experimental.accessibility;


/**
 * Class that adds listeners and handles events from the accessibility API.
 * @constructor
 * @implements {cvox.TtsCapturingEventListener}
 * @param {cvox.TtsInterface} tts The TTS to use for speaking.
 * @param {cvox.BrailleInterface} braille The braille interface to use for
 * braille output.
 * @param {Object} earcons The earcons object to use for playing
 *        earcons.
 */
cvox.AccessibilityApiHandler = function(tts, braille, earcons) {
  this.tts = tts;
  this.braille = braille;
  this.earcons = earcons;
  /**
   * Tracks the previous description received.
   * @type {Object}
   * @private
   */
  this.prevDescription_ = {};
  /**
   * Array of strings to speak the next time TTS is idle.
   * @type {!Array.<string>}
   * @private
   */
  this.idleSpeechQueue_ = [];

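  // These calls will throw if the accessibility extension API is not
  // available.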
  try {
    chrome.accessibilityPrivate.setAccessibilityEnabled(true);
    chrome.accessibilityPrivate.setNativeAccessibilityEnabled(
        !cvox.ChromeVox.isActive);
    this.addEventListeners_();
    if (cvox.ChromeVox.isActive) {
      this.queueAlertsForActiveTab();
    }
  } catch (err) {
    console.log('Error trying to access accessibility extension api.');
  }
};

/**
 * The interface used to manage speech.
 * @type {cvox.TtsInterface}
 */
cvox.AccessibilityApiHandler.prototype.tts = null;

/**
 * The interface used to manage braille.
 * @type {cvox.BrailleInterface}
 */
cvox.AccessibilityApiHandler.prototype.braille = null;

/**
 * The object used to manage earcons.
 * @type {Object}
 */
cvox.AccessibilityApiHandler.prototype.earcons = null;

/**
 * The object that can describe changes and cursor movement in a generic
 *     editable text field.
 * @type {Object}
 */
cvox.AccessibilityApiHandler.prototype.editableTextHandler = null;

/**
 * The name of the editable text field associated with
 * |editableTextHandler|, so we can tell when focus moves.
 * @type {string}
 */
cvox.AccessibilityApiHandler.prototype.editableTextName = '';

/**
 * The queue mode for the next focus event.
 * @type {number}
 */
cvox.AccessibilityApiHandler.prototype.nextQueueMode = 0;

/**
 * The timeout id for the pending text changed event - the return
 * value from window.setTimeout. We need to delay text events slightly
 * and return only the last one because sometimes we get a rapid
 * succession of related events that should all be considered one
 * bulk change - in particular, autocomplete in the location bar comes
 * as multiple events in a row.
 * @type {?number}
 */
cvox.AccessibilityApiHandler.prototype.textChangeTimeout = null;

/**
 * Most controls have a "context" - the name of the window, dialog, toolbar,
 * or menu they're contained in. We announce a context once, when you
 * first enter it - and we don't announce it again when you move to something
 * else within the same context. This variable keeps track of the most
 * recent context.
 * @type {?string}
 */
cvox.AccessibilityApiHandler.prototype.lastContext = null;

/**
 * Delay in ms between when a text event is received and when it's spoken.
 * @type {number}
 * @const
 */
cvox.AccessibilityApiHandler.prototype.TEXT_CHANGE_DELAY = 10;

/**
 * ID returned from setTimeout to queue up speech on idle.
 * @type {?number}
 * @private
 */
cvox.AccessibilityApiHandler.prototype.idleSpeechTimeout_ = null;

/**
 * Milliseconds of silence to wait before considering speech to be idle.
 * @const
 */
cvox.AccessibilityApiHandler.prototype.IDLE_SPEECH_DELAY_MS = 500;

/**
 * Called to let us know that the last speech came from web, and not from
 * native UI. Clear the context and any state associated with the last
 * focused control.
 */
cvox.AccessibilityApiHandler.prototype.setWebContext = function() {
  // This will never be spoken - it's just supposed to be a string that
  // won't match the context of the next control that gets focused.
  this.lastContext = '--internal-web--';
  this.editableTextHandler = null;
  this.editableTextName = '';

  if (chrome.accessibilityPrivate.setFocusRing &&
      cvox.ChromeVox.isChromeOS) {
    // Clear the focus ring.
    chrome.accessibilityPrivate.setFocusRing([]);
  }
};

/**
 * Adds event listeners.
 * @private
 */
cvox.AccessibilityApiHandler.prototype.addEventListeners_ = function() {
  /** Alias getMsg as msg. */
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);

  var accessibility = chrome.accessibilityPrivate;

  chrome.tabs.onActivated.addListener(goog.bind(function(activeInfo) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    chrome.tabs.get(activeInfo.tabId, goog.bind(function(tab) {
      if (tab.status == 'loading') {
        return;
      }
      this.queueAlertsForActiveTab();
    }, this));
  }, this));

  chrome.accessibilityPrivate.onWindowOpened.addListener(
      goog.bind(function(win) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    this.tts.speak(win.name,
                   cvox.AbstractTts.QUEUE_MODE_FLUSH,
                   cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
    this.braille.write(cvox.NavBraille.fromText(win.name));
    // Queue the next utterance because a window opening is always followed
    // by a focus event.
    this.nextQueueMode = 1;
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
    this.queueAlertsForActiveTab();
  }, this));

  chrome.accessibilityPrivate.onWindowClosed.addListener(
      goog.bind(function(win) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    // Don't speak, just play the earcon.
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
  }, this));

  chrome.accessibilityPrivate.onMenuOpened.addListener(
      goog.bind(function(menu) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    this.tts.speak(msg('chrome_menu_opened', [menu.name]),
                   cvox.AbstractTts.QUEUE_MODE_FLUSH,
                   cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
    this.braille.write(
        cvox.NavBraille.fromText(msg('chrome_menu_opened', [menu.name])));
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
  }, this));

  chrome.accessibilityPrivate.onMenuClosed.addListener(
      goog.bind(function(menu) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }
    // Don't speak, just play the earcon.
    this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_CLOSE);
  }, this));

  // systemPrivate API is only available when this extension is loaded as a
  // component extension embedded in Chrome.
  chrome.permissions.contains(
      { permissions: ['systemPrivate'] },
      goog.bind(function(result) {
    if (!result) {
      return;
    }

    // TODO(plundblad): Remove when the native sound is turned on by default.
    // See crbug.com/225886.
    var addOnVolumeChangedListener = goog.bind(function() {
      chrome.systemPrivate.onVolumeChanged.addListener(goog.bind(
          function(volume) {
        if (!cvox.ChromeVox.isActive) {
          return;
        }
        // Don't speak, just play the earcon.
        this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
      }, this));
    }, this);
    if (chrome.commandLinePrivate) {
      chrome.commandLinePrivate.hasSwitch('disable-volume-adjust-sound',
          goog.bind(function(result) {
        if (result) {
          addOnVolumeChangedListener();
        }
      }, this));
    } else {
      addOnVolumeChangedListener();
    }

    chrome.systemPrivate.onBrightnessChanged.addListener(
        goog.bind(
        /**
         * @param {{brightness: number, userInitiated: boolean}} brightness
         */
        function(brightness) {
          if (brightness.userInitiated) {
            this.earcons.playEarcon(cvox.AbstractEarcons.TASK_SUCCESS);
            this.tts.speak(
                msg('chrome_brightness_changed', [brightness.brightness]),
                cvox.AbstractTts.QUEUE_MODE_FLUSH,
                cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
            this.braille.write(cvox.NavBraille.fromText(
                msg('chrome_brightness_changed', [brightness.brightness])));
          }
        }, this));

    chrome.systemPrivate.onScreenUnlocked.addListener(goog.bind(function() {
      chrome.systemPrivate.getUpdateStatus(goog.bind(function(status) {
        if (!cvox.ChromeVox.isActive) {
          return;
        }
        // Speak about system update when it's ready, otherwise speak nothing.
        if (status.state == 'NeedRestart') {
          this.tts.speak(msg('chrome_system_need_restart'),
                         cvox.AbstractTts.QUEUE_MODE_FLUSH,
                         cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
          this.braille.write(
              cvox.NavBraille.fromText(msg('chrome_system_need_restart')));
        }
      }, this));
    }, this));

    chrome.systemPrivate.onWokeUp.addListener(goog.bind(function() {
      if (!cvox.ChromeVox.isActive) {
        return;
      }
      // Don't speak, just play the earcon.
      this.earcons.playEarcon(cvox.AbstractEarcons.OBJECT_OPEN);
    }, this));
  }, this));

  chrome.accessibilityPrivate.onControlFocused.addListener(
      goog.bind(this.onControlFocused, this));

  chrome.accessibilityPrivate.onControlAction.addListener(
      goog.bind(function(ctl) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }

    var description = this.describe(ctl, true);
    this.tts.speak(description.utterance,
                   cvox.AbstractTts.QUEUE_MODE_FLUSH,
                   description.ttsProps);
    description.braille.write();
    if (description.earcon) {
      this.earcons.playEarcon(description.earcon);
    }
  }, this));

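  // The onControlHover event may not be available in all versions of the
  // API, so ignore any error when adding the listener.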
  try {
    chrome.accessibilityPrivate.onControlHover.addListener(
        goog.bind(function(ctl) {
      if (!cvox.ChromeVox.isActive) {
        return;
      }

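      // Only speak on hover when the device supports touch input.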
      var hasTouch = 'ontouchstart' in window;
      if (!hasTouch) {
        return;
      }

      var description = this.describe(ctl, false);
      this.tts.speak(description.utterance,
                     cvox.AbstractTts.QUEUE_MODE_FLUSH,
                     description.ttsProps);
      description.braille.write();
      if (description.earcon) {
        this.earcons.playEarcon(description.earcon);
      }
    }, this));
  } catch (e) {}

  chrome.accessibilityPrivate.onTextChanged.addListener(
       goog.bind(function(ctl) {
    if (!cvox.ChromeVox.isActive) {
      return;
    }

    if (!this.editableTextHandler ||
        this.editableTextName != ctl.name ||
        this.lastContext != ctl.context) {
      // Chrome won't send a text change event on a control that isn't
      // focused. If we get a text change event and it doesn't match the
      // focused control, treat it as a focus event initially.
      this.onControlFocused(ctl);
      return;
    }

    // Only send the most recent text changed event - throw away anything
    // that was pending.
    if (this.textChangeTimeout) {
      window.clearTimeout(this.textChangeTimeout);
    }

    // Handle the text change event after a small delay, so multiple
    // events in rapid succession are handled as a single change. This is
    // specifically for the location bar with autocomplete - typing a
    // character and getting the autocompleted text and getting that
    // text selected may be three separate events.
    this.textChangeTimeout = window.setTimeout(
        goog.bind(function() {
          var textChangeEvent = new cvox.TextChangeEvent(
              ctl.details.value,
              ctl.details.selectionStart,
              ctl.details.selectionEnd,
              true);  // triggered by user
          this.editableTextHandler.changed(
              textChangeEvent);
          this.describe(ctl, false).braille.write();
        }, this), this.TEXT_CHANGE_DELAY);
  }, this));

  this.tts.addCapturingEventListener(this);
};

/**
 * Handle the feedback when a new control gets focus.
 * @param {AccessibilityObject} ctl The focused control.
 */
cvox.AccessibilityApiHandler.prototype.onControlFocused = function(ctl) {
  if (!cvox.ChromeVox.isActive) {
    return;
  }

  if (ctl.bounds &&
      chrome.accessibilityPrivate.setFocusRing &&
      cvox.ChromeVox.isChromeOS) {
    chrome.accessibilityPrivate.setFocusRing([ctl.bounds]);
  }

  // Call this first because it may clear this.editableTextHandler.
  var description = this.describe(ctl, false);

  if (ctl.type == 'textbox') {
    var start = ctl.details.selectionStart;
    var end = ctl.details.selectionEnd;
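    // The selection may be reversed; normalize so that start <= end.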
    if (start > end) {
      start = ctl.details.selectionEnd;
      end = ctl.details.selectionStart;
    }
    this.editableTextName = ctl.name;
    this.editableTextHandler =
        new cvox.ChromeVoxEditableTextBase(
            ctl.details.value,
            start,
            end,
            ctl.details.isPassword,
            this.tts);
  } else {
    this.editableTextHandler = null;
  }

  this.tts.speak(description.utterance,
                 this.nextQueueMode,
                 description.ttsProps);
  description.braille.write();
  this.nextQueueMode = 0;
  if (description.earcon) {
    this.earcons.playEarcon(description.earcon);
  }
};

/**
 * Called when any speech starts.
 */
cvox.AccessibilityApiHandler.prototype.onTtsStart = function() {
  if (this.idleSpeechTimeout_) {
    window.clearTimeout(this.idleSpeechTimeout_);
  }
};

/**
 * Called when any speech ends.
 */
cvox.AccessibilityApiHandler.prototype.onTtsEnd = function() {
  if (this.idleSpeechQueue_.length > 0) {
    this.idleSpeechTimeout_ = window.setTimeout(
        goog.bind(this.onTtsIdle, this),
        this.IDLE_SPEECH_DELAY_MS);
  }
};

/**
 * Called when speech has been idle for a certain minimum delay.
 * Speaks queued messages.
 */
cvox.AccessibilityApiHandler.prototype.onTtsIdle = function() {
  if (this.idleSpeechQueue_.length == 0) {
    return;
  }
  var utterance = this.idleSpeechQueue_.shift();
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);
  this.tts.speak(utterance,
                 cvox.AbstractTts.QUEUE_MODE_FLUSH,
                 cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT);
};

/**
 * Given a control received from the accessibility api, determine an
 * utterance to speak, text to braille, and an earcon to play to describe it.
 * @param {Object} control The control that had an action performed on it.
 * @param {boolean} isSelect True if the action is a select action,
 *     otherwise it's a focus action.
 * @return {Object} An object containing a string |utterance|, a |ttsProps|
 *     object, a |braille| object, and an |earcon|.
 */
cvox.AccessibilityApiHandler.prototype.describe = function(control, isSelect) {
  /** Alias getMsg as msg. */
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);

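  // Build the spoken description in |s| and the braille fields in |braille|.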
  var s = '';
  var braille = {};
  var ttsProps = cvox.AbstractTts.PERSONALITY_ANNOUNCEMENT;

  var context = control.context;
  if (context && context != this.lastContext) {
    s += context + ', ';
    this.lastContext = context;
    this.editableTextHandler = null;
  }

  var earcon = undefined;
  var name = control.name.replace(/[_&]+/g, '').replace('...', '');
  braille.name = control.name;
  switch (control.type) {
    case 'checkbox':
      braille.roleMsg = 'input_type_checkbox';
      if (control.details.isChecked) {
        earcon = cvox.AbstractEarcons.CHECK_ON;
        s += msg('describe_checkbox_checked', [name]);
        braille.state = msg('checkbox_checked_state_brl');
      } else {
        earcon = cvox.AbstractEarcons.CHECK_OFF;
        s += msg('describe_checkbox_unchecked', [name]);
        braille.state = msg('checkbox_unchecked_state_brl');
      }
      break;
    case 'radiobutton':
      s += name;
      braille.roleMsg = 'input_type_radio';
      if (control.details.isChecked) {
        earcon = cvox.AbstractEarcons.CHECK_ON;
        s += msg('describe_radio_selected', [name]);
        braille.state = msg('radio_selected_state_brl');
      } else {
        earcon = cvox.AbstractEarcons.CHECK_OFF;
        s += msg('describe_radio_unselected', [name]);
        braille.state = msg('radio_unselected_state_brl');
      }
      break;
    case 'menu':
      s += msg('describe_menu', [name]);
      braille.roleMsg = 'aria_role_menu';
      break;
    case 'menuitem':
      s += msg(
          control.details.hasSubmenu ?
              'describe_menu_item_with_submenu' : 'describe_menu_item', [name]);
      braille.roleMsg = 'aria_role_menuitem';
      if (control.details.hasSubmenu) {
        braille.state = msg('aria_has_submenu_brl');
      }
      break;
    case 'window':
      s += msg('describe_window', [name]);
      // No specialization for braille.
      braille.name = s;
      break;
    case 'alert':
      earcon = cvox.AbstractEarcons.ALERT_NONMODAL;
      s += msg('aria_role_alert') + ': ' + name;
      ttsProps = cvox.AbstractTts.PERSONALITY_SYSTEM_ALERT;
      braille.roleMsg = 'aria_role_alert';
      isSelect = false;
      break;
    case 'textbox':
      earcon = cvox.AbstractEarcons.EDITABLE_TEXT;
      var unnamed = name == '' ? 'unnamed_' : '';
      var type, value;
      if (control.details.isPassword) {
        type = 'password';
        braille.roleMsg = 'input_type_password';
        value = control.details.value.replace(/./g, '*');
      } else {
        type = 'textbox';
        braille.roleMsg = 'input_type_text';
        value = control.details.value;
      }
      s += msg('describe_' + unnamed + type, [value, name]);
      braille.value = cvox.BrailleUtil.createValue(
          value, control.details.selectionStart, control.details.selectionEnd);
      break;
    case 'button':
      earcon = cvox.AbstractEarcons.BUTTON;
      s += msg('describe_button', [name]);
      braille.roleMsg = 'tag_button';
      break;
    case 'statictext':
      s += control.name;
      break;
    case 'combobox':
    case 'listbox':
      earcon = cvox.AbstractEarcons.LISTBOX;
      var unnamed = name == '' ? 'unnamed_' : '';
      s += msg('describe_' + unnamed + control.type,
                            [control.details.value, name]);
      braille.roleMsg = 'tag_select';
      break;
    case 'link':
      earcon = cvox.AbstractEarcons.LINK;
      s += msg('describe_link', [name]);
      braille.roleMsg = 'tag_link';
      break;
    case 'tab':
      s += msg('describe_tab', [name]);
      braille.roleMsg = 'aria_role_tab';
      break;
    case 'slider':
      s += msg('describe_slider', [control.details.stringValue, name]);
      braille.value = cvox.BrailleUtil.createValue(control.details.stringValue);
      braille.roleMsg = 'aria_role_slider';
      break;
    case 'treeitem':
      if (this.prevDescription_ &&
          this.prevDescription_.details &&
          goog.isDef(control.details.itemDepth) &&
          this.prevDescription_.details.itemDepth !=
              control.details.itemDepth) {
        s += msg('describe_depth', [control.details.itemDepth]);
      }
      s += name + ' ' + msg('aria_role_treeitem');
      s += control.details.isItemExpanded ?
          msg('aria_expanded_true') : msg('aria_expanded_false');

      braille.name = Array(control.details.itemDepth).join(' ') + braille.name;
      braille.roleMsg = 'aria_role_treeitem';
      braille.state = control.details.isItemExpanded ?
          msg('aria_expanded_true_brl') : msg('aria_expanded_false_brl');
      break;

    default:
      s += name + ', ' + control.type;
      braille.role = control.type;
  }

  if (isSelect && control.type != 'slider') {
    s += msg('describe_selected');
  }
  if (control.details && control.details.itemCount >= 0) {
    s += msg('describe_index',
        [control.details.itemIndex + 1, control.details.itemCount]);
    braille.state = braille.state ? braille.state + ' ' : '';
    braille.state += msg('LIST_POSITION_BRL',
        [control.details.itemIndex + 1, control.details.itemCount]);
  }

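  // Assemble the result, carrying any value selection over to the braille
  // output as a start/end index.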
  var description = {};
  description.utterance = s;
  description.ttsProps = ttsProps;
  var spannable = cvox.BrailleUtil.getTemplated(null, null, braille);
  var valueSelectionSpan = spannable.getSpanInstanceOf(
      cvox.BrailleUtil.ValueSelectionSpan);
  var brailleObj = {text: spannable};
  if (valueSelectionSpan) {
    brailleObj.startIndex = spannable.getSpanStart(valueSelectionSpan);
    brailleObj.endIndex = spannable.getSpanEnd(valueSelectionSpan);
  }
  description.braille = new cvox.NavBraille(brailleObj);
  description.earcon = earcon;
  this.prevDescription_ = control;
  return description;
};

/**
 * Queues alerts for the active tab, if any, which will be spoken
 * as soon as speech is idle.
 */
cvox.AccessibilityApiHandler.prototype.queueAlertsForActiveTab = function() {
  this.idleSpeechQueue_.length = 0;
  var msg = goog.bind(cvox.ChromeVox.msgs.getMsg, cvox.ChromeVox.msgs);

  chrome.tabs.query({'active': true, 'currentWindow': true},
      goog.bind(function(tabs) {
    if (tabs.length < 1) {
      return;
    }
    chrome.accessibilityPrivate.getAlertsForTab(
        tabs[0].id, goog.bind(function(alerts) {
      if (alerts.length == 0) {
        return;
      }

      var utterance = '';

      if (alerts.length == 1) {
        utterance += msg('page_has_one_alert_singular');
      } else {
        utterance += msg('page_has_alerts_plural',
                         [alerts.length]);
      }

      for (var i = 0; i < alerts.length; i++) {
        utterance += ' ' + alerts[i].message;
      }

      utterance += ' ' + msg('review_alerts');

      if (this.idleSpeechQueue_.indexOf(utterance) == -1) {
        this.idleSpeechQueue_.push(utterance);
      }
    }, this));
  }, this));
};
