1// Copyright 2013 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "hydrogen.h"
29
30#include <algorithm>
31
32#include "v8.h"
33#include "allocation-site-scopes.h"
34#include "codegen.h"
35#include "full-codegen.h"
36#include "hashmap.h"
37#include "hydrogen-bce.h"
38#include "hydrogen-bch.h"
39#include "hydrogen-canonicalize.h"
40#include "hydrogen-check-elimination.h"
41#include "hydrogen-dce.h"
42#include "hydrogen-dehoist.h"
43#include "hydrogen-environment-liveness.h"
44#include "hydrogen-escape-analysis.h"
45#include "hydrogen-infer-representation.h"
46#include "hydrogen-infer-types.h"
47#include "hydrogen-load-elimination.h"
48#include "hydrogen-gvn.h"
49#include "hydrogen-mark-deoptimize.h"
50#include "hydrogen-mark-unreachable.h"
51#include "hydrogen-minus-zero.h"
52#include "hydrogen-osr.h"
53#include "hydrogen-range-analysis.h"
54#include "hydrogen-redundant-phi.h"
55#include "hydrogen-removable-simulates.h"
56#include "hydrogen-representation-changes.h"
57#include "hydrogen-sce.h"
58#include "hydrogen-uint32-analysis.h"
59#include "lithium-allocator.h"
60#include "parser.h"
61#include "runtime.h"
62#include "scopeinfo.h"
63#include "scopes.h"
64#include "stub-cache.h"
65#include "typing.h"
66
67#if V8_TARGET_ARCH_IA32
68#include "ia32/lithium-codegen-ia32.h"
69#elif V8_TARGET_ARCH_X64
70#include "x64/lithium-codegen-x64.h"
71#elif V8_TARGET_ARCH_ARM
72#include "arm/lithium-codegen-arm.h"
73#elif V8_TARGET_ARCH_MIPS
74#include "mips/lithium-codegen-mips.h"
75#else
76#error Unsupported target architecture.
77#endif
78
79namespace v8 {
80namespace internal {
81
// Constructs an empty basic block owned by |graph|. All list members are
// allocated in the graph's zone; instruction pointers and indices start
// out unset (NULL / -1) until instructions are added and the block is
// numbered by later passes.
HBasicBlock::HBasicBlock(HGraph* graph)
    : block_id_(graph->GetNextBlockID()),
      graph_(graph),
      phis_(4, graph->zone()),
      first_(NULL),
      last_(NULL),
      end_(NULL),
      loop_information_(NULL),
      predecessors_(2, graph->zone()),
      dominator_(NULL),
      dominated_blocks_(4, graph->zone()),
      last_environment_(NULL),
      argument_count_(-1),
      first_instruction_index_(-1),
      last_instruction_index_(-1),
      deleted_phis_(4, graph->zone()),
      parent_loop_header_(NULL),
      inlined_entry_block_(NULL),
      is_inline_return_target_(false),
      is_reachable_(true),
      dominates_loop_successors_(false),
      is_osr_entry_(false) { }
104
105
// Convenience accessor: a block lives in the isolate of its graph.
Isolate* HBasicBlock::isolate() const {
  return graph_->isolate();
}
109
110
// Flags this block as unreachable; used by the mark-unreachable pass.
void HBasicBlock::MarkUnreachable() {
  is_reachable_ = false;
}
114
115
// Turns this block into a loop header by attaching fresh loop metadata.
// Must not already be a loop header.
void HBasicBlock::AttachLoopInformation() {
  ASSERT(!IsLoopHeader());
  loop_information_ = new(zone()) HLoopInformation(this, zone());
}
120
121
// Strips loop-header status, e.g. for degenerated loops with no back edge.
// The zone-allocated HLoopInformation is simply abandoned, not freed.
void HBasicBlock::DetachLoopInformation() {
  ASSERT(IsLoopHeader());
  loop_information_ = NULL;
}
126
127
// Adds |phi| to this block's phi list and points the phi back at the
// block. The start block can never merge values, so it takes no phis.
void HBasicBlock::AddPhi(HPhi* phi) {
  ASSERT(!IsStartBlock());
  phis_.Add(phi, zone());
  phi->SetBlock(this);
}
133
134
// Removes |phi| from this block. Kill() clears the phi's operands before
// the phi is unlinked so no dangling uses remain.
void HBasicBlock::RemovePhi(HPhi* phi) {
  ASSERT(phi->block() == this);
  ASSERT(phis_.Contains(phi));
  phi->Kill();
  phis_.RemoveElement(phi);
  phi->SetBlock(NULL);
}
142
143
// Appends |instr| at the end of this block, lazily creating the
// HBlockEntry marker the first time an instruction is added. |position|
// is a source position (or RelocInfo::kNoPosition to leave it unset).
void HBasicBlock::AddInstruction(HInstruction* instr, int position) {
  ASSERT(!IsStartBlock() || !IsFinished());
  ASSERT(!instr->IsLinked());
  // NOTE(review): this assert subsumes the disjunction two lines up; the
  // first assert looks redundant -- confirm before cleaning up.
  ASSERT(!IsFinished());

  if (position != RelocInfo::kNoPosition) {
    instr->set_position(position);
  }
  if (first_ == NULL) {
    // First instruction of the block: materialize the entry marker. An
    // environment with a valid ast id must already be in place.
    ASSERT(last_environment() != NULL);
    ASSERT(!last_environment()->ast_id().IsNone());
    HBlockEntry* entry = new(zone()) HBlockEntry();
    entry->InitializeAsFirst(this);
    if (position != RelocInfo::kNoPosition) {
      entry->set_position(position);
    } else {
      // Optimized code with position tracking should always have one.
      ASSERT(!FLAG_emit_opt_code_positions ||
             !graph()->info()->IsOptimizing());
    }
    first_ = last_ = entry;
  }
  instr->InsertAfter(last_);
}
167
168
169HPhi* HBasicBlock::AddNewPhi(int merged_index) {
170  if (graph()->IsInsideNoSideEffectsScope()) {
171    merged_index = HPhi::kInvalidMergedIndex;
172  }
173  HPhi* phi = new(zone()) HPhi(merged_index, zone());
174  AddPhi(phi);
175  return phi;
176}
177
178
// Builds an HSimulate capturing the pending changes (pushes, pops and
// variable assignments) recorded in the current environment since the
// last simulate, then clears that history. |removable| marks simulates
// that a later pass may merge away.
HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
                                       RemovableSimulate removable) {
  ASSERT(HasEnvironment());
  HEnvironment* environment = last_environment();
  ASSERT(ast_id.IsNone() ||
         ast_id == BailoutId::StubEntry() ||
         environment->closure()->shared()->VerifyBailoutId(ast_id));

  int push_count = environment->push_count();
  int pop_count = environment->pop_count();

  HSimulate* instr =
      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
#ifdef DEBUG
  instr->set_closure(environment->closure());
#endif
  // Order of pushed values: newest (top of stack) first. This allows
  // HSimulate::MergeWith() to easily append additional pushed values
  // that are older (from further down the stack).
  for (int i = 0; i < push_count; ++i) {
    instr->AddPushedValue(environment->ExpressionStackAt(i));
  }
  // Record the current value of every variable assigned since the last
  // simulate.
  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
                                      zone());
       !it.Done();
       it.Advance()) {
    int index = it.Current();
    instr->AddAssignedValue(index, environment->Lookup(index));
  }
  environment->ClearHistory();
  return instr;
}
211
212
// Terminates this block with control instruction |end| and registers the
// block as a predecessor of each of |end|'s successors.
void HBasicBlock::Finish(HControlInstruction* end, int position) {
  ASSERT(!IsFinished());
  AddInstruction(end, position);
  end_ = end;
  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
    it.Current()->RegisterPredecessor(this);
  }
}
221
222
// Ends this block with an unconditional jump to |block|. When the target
// is an inline-return target, the inlined frame is left first and the
// inlined environment is discarded.
void HBasicBlock::Goto(HBasicBlock* block,
                       int position,
                       FunctionState* state,
                       bool add_simulate) {
  // DROP_EXTRA_ON_RETURN means one extra stack slot is discarded together
  // with the inlined frame.
  bool drop_extra = state != NULL &&
      state->inlining_kind() == DROP_EXTRA_ON_RETURN;

  if (block->IsInlineReturnTarget()) {
    HEnvironment* env = last_environment();
    int argument_count = env->arguments_environment()->parameter_count();
    AddInstruction(new(zone())
                   HLeaveInlined(state->entry(), argument_count),
                   position);
    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  }

  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(block);
  Finish(instr, position);
}
243
244
// Returns |return_value| from an inlined function: leaves the inlined
// frame, restores the caller environment, pushes the return value and
// jumps to the function's return-target block.
void HBasicBlock::AddLeaveInlined(HValue* return_value,
                                  FunctionState* state,
                                  int position) {
  HBasicBlock* target = state->function_return();
  bool drop_extra = state->inlining_kind() == DROP_EXTRA_ON_RETURN;

  ASSERT(target->IsInlineReturnTarget());
  ASSERT(return_value != NULL);
  HEnvironment* env = last_environment();
  int argument_count = env->arguments_environment()->parameter_count();
  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
                 position);
  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
  last_environment()->Push(return_value);
  AddNewSimulate(BailoutId::None(), position);
  HGoto* instr = new(zone()) HGoto(target);
  Finish(instr, position);
}
263
264
// Installs the first environment of a still-empty block.
void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
  ASSERT(!HasEnvironment());
  ASSERT(first() == NULL);
  UpdateEnvironment(env);
}
270
271
// Replaces the block's environment and lets the graph track the largest
// environment seen (used for stack-size bookkeeping).
void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
  last_environment_ = env;
  graph()->update_maximum_environment_size(env->first_expression_index());
}
276
277
// Stamps |ast_id| onto the simulate preceding each predecessor's goto and
// onto each predecessor's environment, so all incoming edges of this join
// agree on the bailout point.
void HBasicBlock::SetJoinId(BailoutId ast_id) {
  int length = predecessors_.length();
  ASSERT(length > 0);
  for (int i = 0; i < length; i++) {
    HBasicBlock* predecessor = predecessors_[i];
    ASSERT(predecessor->end()->IsGoto());
    // The instruction right before the goto must be the simulate to tag.
    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
    ASSERT(i != 0 ||
           (predecessor->last_environment()->closure().is_null() ||
            predecessor->last_environment()->closure()->shared()
              ->VerifyBailoutId(ast_id)));
    simulate->set_ast_id(ast_id);
    predecessor->last_environment()->set_ast_id(ast_id);
  }
}
293
294
295bool HBasicBlock::Dominates(HBasicBlock* other) const {
296  HBasicBlock* current = other->dominator();
297  while (current != NULL) {
298    if (current == this) return true;
299    current = current->dominator();
300  }
301  return false;
302}
303
304
305int HBasicBlock::LoopNestingDepth() const {
306  const HBasicBlock* current = this;
307  int result  = (current->IsLoopHeader()) ? 1 : 0;
308  while (current->parent_loop_header() != NULL) {
309    current = current->parent_loop_header();
310    result++;
311  }
312  return result;
313}
314
315
// Finalizes a loop header after its body has been built: tags the join
// with the loop's entry id and registers the back edges. A header with a
// single predecessor never ran its body, so its loop status is dropped.
void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
  ASSERT(IsLoopHeader());

  SetJoinId(stmt->EntryId());
  if (predecessors()->length() == 1) {
    // This is a degenerated loop.
    DetachLoopInformation();
    return;
  }

  // Only the first entry into the loop is from outside the loop. All other
  // entries must be back edges.
  for (int i = 1; i < predecessors()->length(); ++i) {
    loop_information()->RegisterBackEdge(predecessors()->at(i));
  }
}
332
333
// Records |pred| as an incoming edge. For the first predecessor of an
// environment-less block the environment is copied over; for further
// predecessors the environments are merged, either through the loop
// header's phis or via AddIncomingEdge for ordinary joins.
void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
  if (HasPredecessor()) {
    // Only loop header blocks can have a predecessor added after
    // instructions have been added to the block (they have phis for all
    // values in the environment, these phis may be eliminated later).
    ASSERT(IsLoopHeader() || first_ == NULL);
    HEnvironment* incoming_env = pred->last_environment();
    if (IsLoopHeader()) {
      // Loop headers carry one phi per environment slot; feed the back
      // edge's values into those phis.
      ASSERT(phis()->length() == incoming_env->length());
      for (int i = 0; i < phis_.length(); ++i) {
        phis_[i]->AddInput(incoming_env->values()->at(i));
      }
    } else {
      last_environment()->AddIncomingEdge(this, pred->last_environment());
    }
  } else if (!HasEnvironment() && !IsFinished()) {
    ASSERT(!IsLoopHeader());
    SetInitialEnvironment(pred->last_environment()->Copy());
  }

  predecessors_.Add(pred, zone());
}
356
357
// Inserts |block| into the dominated-blocks list, keeping it sorted by
// block id.
void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
  ASSERT(!dominated_blocks_.Contains(block));
  // Keep the list of dominated blocks sorted such that if there are two
  // succeeding blocks in this list, the predecessor is before the successor.
  int index = 0;
  while (index < dominated_blocks_.length() &&
         dominated_blocks_[index]->block_id() < block->block_id()) {
    ++index;
  }
  dominated_blocks_.InsertAt(index, block, zone());
}
369
370
// Updates this block's immediate dominator to the common dominator of the
// current dominator and |other|. The intersection walk relies on blocks
// being numbered in reverse post order, so walking up from the block with
// the larger id converges on the common ancestor.
void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
  if (dominator_ == NULL) {
    // First edge processed: |other| becomes the tentative dominator.
    dominator_ = other;
    other->AddDominatedBlock(this);
  } else if (other->dominator() != NULL) {
    HBasicBlock* first = dominator_;
    HBasicBlock* second = other;

    while (first != second) {
      if (first->block_id() > second->block_id()) {
        first = first->dominator();
      } else {
        second = second->dominator();
      }
      ASSERT(first != NULL && second != NULL);
    }

    if (dominator_ != first) {
      // Dominator changed: move this block to the new dominator's list.
      ASSERT(dominator_->dominated_blocks_.Contains(this));
      dominator_->dominated_blocks_.RemoveElement(this);
      dominator_ = first;
      first->AddDominatedBlock(this);
    }
  }
}
396
397
// Called on a loop header; marks every block in the loop that dominates
// all reachable loop blocks with higher ids (see the edge-counting
// argument below).
void HBasicBlock::AssignLoopSuccessorDominators() {
  // Mark blocks that dominate all subsequent reachable blocks inside their
  // loop. Exploit the fact that blocks are sorted in reverse post order. When
  // the loop is visited in increasing block id order, if the number of
  // non-loop-exiting successor edges at the dominator_candidate block doesn't
  // exceed the number of previously encountered predecessor edges, there is no
  // path from the loop header to any block with higher id that doesn't go
  // through the dominator_candidate block. In this case, the
  // dominator_candidate block is guaranteed to dominate all blocks reachable
  // from it with higher ids.
  HBasicBlock* last = loop_information()->GetLastBackEdge();
  int outstanding_successors = 1;  // one edge from the pre-header
  // Header always dominates everything.
  MarkAsLoopSuccessorDominator();
  for (int j = block_id(); j <= last->block_id(); ++j) {
    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
    for (HPredecessorIterator it(dominator_candidate); !it.Done();
         it.Advance()) {
      HBasicBlock* predecessor = it.Current();
      // Don't count back edges.
      if (predecessor->block_id() < dominator_candidate->block_id()) {
        outstanding_successors--;
      }
    }

    // If more successors than predecessors have been seen in the loop up to
    // now, it's not possible to guarantee that the current block dominates
    // all of the blocks with higher IDs. In this case, assume conservatively
    // that those paths through loop that don't go through the current block
    // contain all of the loop's dependencies. Also be careful to record
    // dominator information about the current loop that's being processed,
    // and not nested loops, which will be processed when
    // AssignLoopSuccessorDominators gets called on their header.
    ASSERT(outstanding_successors >= 0);
    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
    if (outstanding_successors == 0 &&
        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
      dominator_candidate->MarkAsLoopSuccessorDominator();
    }
    HControlInstruction* end = dominator_candidate->end();
    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
      HBasicBlock* successor = it.Current();
      // Only count successors that remain inside the loop and don't loop back
      // to a loop header.
      if (successor->block_id() > dominator_candidate->block_id() &&
          successor->block_id() <= last->block_id()) {
        // Backwards edges must land on loop headers.
        // NOTE(review): the first disjunct below is already guaranteed by
        // the enclosing if-condition, making this ASSERT vacuous here --
        // confirm whether it was meant to sit outside the if.
        ASSERT(successor->block_id() > dominator_candidate->block_id() ||
               successor->IsLoopHeader());
        outstanding_successors++;
      }
    }
  }
}
452
453
454int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
455  for (int i = 0; i < predecessors_.length(); ++i) {
456    if (predecessors_[i] == predecessor) return i;
457  }
458  UNREACHABLE();
459  return -1;
460}
461
462
463#ifdef DEBUG
// Debug-only sanity checks for a single block.
void HBasicBlock::Verify() {
  // Check that every block is finished.
  ASSERT(IsFinished());
  ASSERT(block_id() >= 0);

  // Check that the incoming edges are in edge split form.
  if (predecessors_.length() > 1) {
    for (int i = 0; i < predecessors_.length(); ++i) {
      // A join's predecessors must end in unconditional control flow.
      ASSERT(predecessors_[i]->end()->SecondSuccessor() == NULL);
    }
  }
}
476#endif
477
478
// Records |block| as a back edge of this loop and transitively adds the
// blocks on the path back to the header to the loop's member set.
void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
  this->back_edges_.Add(block, block->zone());
  AddBlock(block);
}
483
484
485HBasicBlock* HLoopInformation::GetLastBackEdge() const {
486  int max_id = -1;
487  HBasicBlock* result = NULL;
488  for (int i = 0; i < back_edges_.length(); ++i) {
489    HBasicBlock* cur = back_edges_[i];
490    if (cur->block_id() > max_id) {
491      max_id = cur->block_id();
492      result = cur;
493    }
494  }
495  return result;
496}
497
498
// Adds |block| (and, recursively, its predecessors) to this loop's block
// set. Blocks already belonging to an inner loop are represented by that
// inner loop's header instead of being added individually.
void HLoopInformation::AddBlock(HBasicBlock* block) {
  if (block == loop_header()) return;
  if (block->parent_loop_header() == loop_header()) return;  // Already added.
  if (block->parent_loop_header() != NULL) {
    // Member of a nested loop: record the nested loop via its header.
    AddBlock(block->parent_loop_header());
  } else {
    block->set_parent_loop_header(loop_header());
    blocks_.Add(block, block->zone());
    for (int i = 0; i < block->predecessors()->length(); ++i) {
      AddBlock(block->predecessors()->at(i));
    }
  }
}
512
513
514#ifdef DEBUG
515
516// Checks reachability of the blocks in this graph and stores a bit in
517// the BitVector "reachable()" for every block that can be reached
518// from the start block of the graph. If "dont_visit" is non-null, the given
519// block is treated as if it would not be part of the graph. "visited_count()"
520// returns the number of reachable blocks.
class ReachabilityAnalyzer BASE_EMBEDDED {
 public:
  // Runs the analysis immediately: a DFS from |entry_block| over successor
  // edges, skipping |dont_visit| (may be NULL).
  ReachabilityAnalyzer(HBasicBlock* entry_block,
                       int block_count,
                       HBasicBlock* dont_visit)
      : visited_count_(0),
        stack_(16, entry_block->zone()),
        reachable_(block_count, entry_block->zone()),
        dont_visit_(dont_visit) {
    PushBlock(entry_block);
    Analyze();
  }

  // Number of blocks reached from the entry block.
  int visited_count() const { return visited_count_; }
  // Bit per block id: set iff the block was reached.
  const BitVector* reachable() const { return &reachable_; }

 private:
  // Pushes |block| for traversal unless it is excluded or already seen.
  void PushBlock(HBasicBlock* block) {
    if (block != NULL && block != dont_visit_ &&
        !reachable_.Contains(block->block_id())) {
      reachable_.Add(block->block_id());
      stack_.Add(block, block->zone());
      visited_count_++;
    }
  }

  // Iterative DFS over the successor edges of the pushed blocks.
  void Analyze() {
    while (!stack_.is_empty()) {
      HControlInstruction* end = stack_.RemoveLast()->end();
      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
        PushBlock(it.Current());
      }
    }
  }

  int visited_count_;              // Blocks reached so far.
  ZoneList<HBasicBlock*> stack_;   // DFS work list.
  BitVector reachable_;            // Visited set, indexed by block id.
  HBasicBlock* dont_visit_;        // Block treated as removed, or NULL.
};
561
562
// Debug-only whole-graph consistency check: per-block invariants,
// successor/predecessor symmetry, phi inputs, join agreement and --
// when |do_full_verify| is set -- reachability and dominator sanity.
void HGraph::Verify(bool do_full_verify) const {
  // Dereferencing heap handles below requires the heap to be stable.
  Heap::RelocationLock relocation_lock(isolate()->heap());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  for (int i = 0; i < blocks_.length(); i++) {
    HBasicBlock* block = blocks_.at(i);

    block->Verify();

    // Check that every block contains at least one node and that only the last
    // node is a control instruction.
    HInstruction* current = block->first();
    ASSERT(current != NULL && current->IsBlockEntry());
    while (current != NULL) {
      ASSERT((current->next() == NULL) == current->IsControlInstruction());
      ASSERT(current->block() == block);
      current->Verify();
      current = current->next();
    }

    // Check that successors are correctly set.
    HBasicBlock* first = block->end()->FirstSuccessor();
    HBasicBlock* second = block->end()->SecondSuccessor();
    ASSERT(second == NULL || first != NULL);

    // Check that the predecessor array is correct.
    if (first != NULL) {
      ASSERT(first->predecessors()->Contains(block));
      if (second != NULL) {
        ASSERT(second->predecessors()->Contains(block));
      }
    }

    // Check that phis have correct arguments.
    for (int j = 0; j < block->phis()->length(); j++) {
      HPhi* phi = block->phis()->at(j);
      phi->Verify();
    }

    // Check that all join blocks have predecessors that end with an
    // unconditional goto and agree on their environment node id.
    if (block->predecessors()->length() >= 2) {
      BailoutId id =
          block->predecessors()->first()->last_environment()->ast_id();
      for (int k = 0; k < block->predecessors()->length(); k++) {
        HBasicBlock* predecessor = block->predecessors()->at(k);
        ASSERT(predecessor->end()->IsGoto() ||
               predecessor->end()->IsDeoptimize());
        ASSERT(predecessor->last_environment()->ast_id() == id);
      }
    }
  }

  // Check special property of first block to have no predecessors.
  ASSERT(blocks_.at(0)->predecessors()->is_empty());

  if (do_full_verify) {
    // Check that the graph is fully connected.
    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
    ASSERT(analyzer.visited_count() == blocks_.length());

    // Check that entry block dominator is NULL.
    ASSERT(entry_block_->dominator() == NULL);

    // Check dominators.
    for (int i = 0; i < blocks_.length(); ++i) {
      HBasicBlock* block = blocks_.at(i);
      if (block->dominator() == NULL) {
        // Only start block may have no dominator assigned to.
        ASSERT(i == 0);
      } else {
        // Assert that block is unreachable if dominator must not be visited.
        ReachabilityAnalyzer dominator_analyzer(entry_block_,
                                                blocks_.length(),
                                                block->dominator());
        ASSERT(!dominator_analyzer.reachable()->Contains(block->block_id()));
      }
    }
  }
}
643
644#endif
645
646
// Returns the cached HConstant for the int32 |value|, creating and
// caching it in |pointer| on first use. New constants are inserted right
// after the graph entry so they dominate every use.
HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
                               int32_t value) {
  if (!pointer->is_set()) {
    // Can't pass GetInvalidContext() to HConstant::New, because that will
    // recursively call GetConstant
    HConstant* constant = HConstant::New(zone(), NULL, value);
    constant->InsertAfter(entry_block()->first());
    pointer->set(constant);
    return constant;
  }
  // Cached, but a previous pass may have unlinked it from the graph.
  return ReinsertConstantIfNecessary(pointer->get());
}
659
660
661HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
662  if (!constant->IsLinked()) {
663    // The constant was removed from the graph. Reinsert.
664    constant->ClearFlag(HValue::kIsDead);
665    constant->InsertAfter(entry_block()->first());
666  }
667  return constant;
668}
669
670
// Cached getters for the frequently used small integer constants.
HConstant* HGraph::GetConstant0() {
  return GetConstant(&constant_0_, 0);
}


HConstant* HGraph::GetConstant1() {
  return GetConstant(&constant_1_, 1);
}


HConstant* HGraph::GetConstantMinus1() {
  return GetConstant(&constant_minus1_, -1);
}
684
685
// Defines HGraph::GetConstant##Name(): lazily creates and caches the
// singleton HConstant for a well-known root value (undefined, true, ...).
// The constant is wrapped as an immovable unique handle, always tagged,
// and inserted right after the graph entry so it dominates all uses.
// Keep interior comments out of the macro body -- a "//" line ending in a
// backslash continuation would swallow the following line.
#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value)                  \
HConstant* HGraph::GetConstant##Name() {                                       \
  if (!constant_##name##_.is_set()) {                                          \
    HConstant* constant = new(zone()) HConstant(                               \
        Unique<Object>::CreateImmovable(isolate()->factory()->name##_value()), \
        Representation::Tagged(),                                              \
        htype,                                                                 \
        false,                                                                 \
        true,                                                                  \
        false,                                                                 \
        boolean_value);                                                        \
    constant->InsertAfter(entry_block()->first());                             \
    constant_##name##_.set(constant);                                          \
  }                                                                            \
  return ReinsertConstantIfNecessary(constant_##name##_.get());                \
}
702
703
// Instantiate the cached getters for the standard root-list constants.
DEFINE_GET_CONSTANT(Undefined, undefined, HType::Tagged(), false)
DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
DEFINE_GET_CONSTANT(Null, null, HType::Tagged(), false)


#undef DEFINE_GET_CONSTANT

// Defines HGraph::IsConstant##Name(): true iff |constant| is the cached
// singleton. A singleton that was never requested is "not set" and the
// predicate is simply false.
#define DEFINE_IS_CONSTANT(Name, name)                                         \
bool HGraph::IsConstant##Name(HConstant* constant) {                           \
  return constant_##name##_.is_set() && constant == constant_##name##_.get();  \
}
DEFINE_IS_CONSTANT(Undefined, undefined)
DEFINE_IS_CONSTANT(0, 0)
DEFINE_IS_CONSTANT(1, 1)
DEFINE_IS_CONSTANT(Minus1, minus1)
DEFINE_IS_CONSTANT(True, true)
DEFINE_IS_CONSTANT(False, false)
DEFINE_IS_CONSTANT(Hole, the_hole)
DEFINE_IS_CONSTANT(Null, null)

#undef DEFINE_IS_CONSTANT
727
728
// Returns the placeholder constant used where no real context is needed.
// NOTE(review): 0xFFFFC0C7 is a magic marker value, presumably chosen to
// be recognizable in dumps -- confirm before relying on its exact value.
HConstant* HGraph::GetInvalidContext() {
  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
}
732
733
734bool HGraph::IsStandardConstant(HConstant* constant) {
735  if (IsConstantUndefined(constant)) return true;
736  if (IsConstant0(constant)) return true;
737  if (IsConstant1(constant)) return true;
738  if (IsConstantMinus1(constant)) return true;
739  if (IsConstantTrue(constant)) return true;
740  if (IsConstantFalse(constant)) return true;
741  if (IsConstantHole(constant)) return true;
742  if (IsConstantNull(constant)) return true;
743  return false;
744}
745
746
// Starts a fresh if/else construct: both branch blocks are created
// up-front with copies of the current environment, and the builder waits
// for a compare (needs_compare_ == true) before a branch is taken.
HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(true),
      pending_merge_block_(false),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  HEnvironment* env = builder->environment();
  first_true_block_ = builder->CreateBasicBlock(env->Copy());
  first_false_block_ = builder->CreateBasicBlock(env->Copy());
}
766
767
// Resumes an if/else construct from a previously captured continuation:
// the branch blocks come from |continuation| and no compare is expected
// (needs_compare_ == false).
HGraphBuilder::IfBuilder::IfBuilder(
    HGraphBuilder* builder,
    HIfContinuation* continuation)
    : builder_(builder),
      finished_(false),
      did_then_(false),
      did_else_(false),
      did_else_if_(false),
      did_and_(false),
      did_or_(false),
      captured_(false),
      needs_compare_(false),
      pending_merge_block_(false),
      first_true_block_(NULL),
      first_false_block_(NULL),
      split_edge_merge_block_(NULL),
      merge_at_join_blocks_(NULL),
      normal_merge_at_join_block_count_(0),
      deopt_merge_at_join_block_count_(0) {
  continuation->Continue(&first_true_block_,
                         &first_false_block_);
}
790
791
// Wires |compare| into the current condition: its successors are either
// the true/false branch blocks directly or, when an And()/Or() chain is
// in progress, the pending split-edge merge block. Returns |compare|.
HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
    HControlInstruction* compare) {
  ASSERT(did_then_ == did_else_);
  if (did_else_) {
    // Handle if-then-elseif
    did_else_if_ = true;
    did_else_ = false;
    did_then_ = false;
    did_and_ = false;
    did_or_ = false;
    pending_merge_block_ = false;
    split_edge_merge_block_ = NULL;
    HEnvironment* env = builder_->environment();
    first_true_block_ = builder_->CreateBasicBlock(env->Copy());
    first_false_block_ = builder_->CreateBasicBlock(env->Copy());
  }
  if (split_edge_merge_block_ != NULL) {
    // Inside an Or()/And() chain: route the short-circuit edge through a
    // fresh split block into the shared merge block.
    HEnvironment* env = first_false_block_->last_environment();
    HBasicBlock* split_edge =
        builder_->CreateBasicBlock(env->Copy());
    if (did_or_) {
      compare->SetSuccessorAt(0, split_edge);
      compare->SetSuccessorAt(1, first_false_block_);
    } else {
      compare->SetSuccessorAt(0, first_true_block_);
      compare->SetSuccessorAt(1, split_edge);
    }
    builder_->GotoNoSimulate(split_edge, split_edge_merge_block_);
  } else {
    compare->SetSuccessorAt(0, first_true_block_);
    compare->SetSuccessorAt(1, first_false_block_);
  }
  builder_->FinishCurrentBlock(compare);
  needs_compare_ = false;
  return compare;
}
828
829
// Starts (or continues) a short-circuit OR chain: true edges of all
// compares funnel into a shared merge block, and evaluation continues in
// the previous false block with a fresh false target.
void HGraphBuilder::IfBuilder::Or() {
  ASSERT(!needs_compare_);
  ASSERT(!did_and_);  // Mixing And() and Or() in one chain is unsupported.
  did_or_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ =
        builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
    first_true_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_false_block_);
  first_false_block_ = builder_->CreateBasicBlock(env->Copy());
}
844
845
// Starts (or continues) a short-circuit AND chain: false edges of all
// compares funnel into a shared merge block, and evaluation continues in
// the previous true block with a fresh true target.
void HGraphBuilder::IfBuilder::And() {
  ASSERT(!needs_compare_);
  ASSERT(!did_or_);  // Mixing And() and Or() in one chain is unsupported.
  did_and_ = true;
  HEnvironment* env = first_false_block_->last_environment();
  if (split_edge_merge_block_ == NULL) {
    split_edge_merge_block_ = builder_->CreateBasicBlock(env->Copy());
    builder_->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
    first_false_block_ = split_edge_merge_block_;
  }
  builder_->set_current_block(first_true_block_);
  first_true_block_ = builder_->CreateBasicBlock(env->Copy());
}
859
860
// Finishes the construct and hands its true/false exit blocks to
// |continuation| instead of merging them; the builder's current block is
// cleared because control continues through the continuation later.
void HGraphBuilder::IfBuilder::CaptureContinuation(
    HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);

  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  ASSERT(true_block != NULL);
  ASSERT(false_block != NULL);
  continuation->Capture(true_block, false_block);
  captured_ = true;
  builder_->set_current_block(NULL);
  End();
}
877
878
// Finishes the construct and routes its still-open true/false exits into
// the corresponding branches of an existing |continuation|.
void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
  ASSERT(!did_else_if_);
  ASSERT(!finished_);
  ASSERT(!captured_);
  HBasicBlock* true_block = NULL;
  HBasicBlock* false_block = NULL;
  Finish(&true_block, &false_block);
  merge_at_join_blocks_ = NULL;  // Joining elsewhere; drop pending merges.
  if (true_block != NULL && !true_block->IsFinished()) {
    ASSERT(continuation->IsTrueReachable());
    builder_->GotoNoSimulate(true_block, continuation->true_branch());
  }
  if (false_block != NULL && !false_block->IsFinished()) {
    ASSERT(continuation->IsFalseReachable());
    builder_->GotoNoSimulate(false_block, continuation->false_branch());
  }
  captured_ = true;
  End();
}
898
899
// Begins the "then" body: makes first_true_block_ the current block and
// marks that a merge at the join point is pending.
void HGraphBuilder::IfBuilder::Then() {
  ASSERT(!captured_);
  ASSERT(!finished_);
  did_then_ = true;
  if (needs_compare_) {
    // Handle if's without any expressions, they jump directly to the "else"
    // branch. However, we must pretend that the "then" branch is reachable,
    // so that the graph builder visits it and sees any live range extending
    // constructs within it.
    HConstant* constant_false = builder_->graph()->GetConstantFalse();
    ToBooleanStub::Types boolean_type = ToBooleanStub::Types();
    boolean_type.Add(ToBooleanStub::BOOLEAN);
    HBranch* branch = builder()->New<HBranch>(
        constant_false, boolean_type, first_true_block_, first_false_block_);
    builder_->FinishCurrentBlock(branch);
  }
  builder_->set_current_block(first_true_block_);
  pending_merge_block_ = true;
}
919
920
// Ends the "then" body (recording it for the final merge) and switches the
// builder to the "else" block.
void HGraphBuilder::IfBuilder::Else() {
  ASSERT(did_then_);
  ASSERT(!captured_);
  ASSERT(!finished_);
  AddMergeAtJoinBlock(false);
  builder_->set_current_block(first_false_block_);
  pending_merge_block_ = true;
  did_else_ = true;
}
930
931
// Ends the current arm with an eager deoptimize; the block is recorded as a
// deopt merge so End() pads its environment before joining.
void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
  ASSERT(did_then_);
  builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  AddMergeAtJoinBlock(true);
}
937
938
// Ends the current arm with a return of |value|. The constant -1 parameter
// count indicates a dynamic argument count (taken from the frame).
// NOTE(review): semantics of -1 inferred from GetConstantMinus1 usage here;
// confirm against HReturn's definition.
void HGraphBuilder::IfBuilder::Return(HValue* value) {
  HValue* parameter_count = builder_->graph()->GetConstantMinus1();
  builder_->FinishExitCurrentBlock(
      builder_->New<HReturn>(value, parameter_count));
  AddMergeAtJoinBlock(false);
}
945
946
947void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
948  if (!pending_merge_block_) return;
949  HBasicBlock* block = builder_->current_block();
950  ASSERT(block == NULL || !block->IsFinished());
951  MergeAtJoinBlock* record =
952      new(builder_->zone()) MergeAtJoinBlock(block, deopt,
953                                             merge_at_join_blocks_);
954  merge_at_join_blocks_ = record;
955  if (block != NULL) {
956    ASSERT(block->end() == NULL);
957    if (deopt) {
958      normal_merge_at_join_block_count_++;
959    } else {
960      deopt_merge_at_join_block_count_++;
961    }
962  }
963  builder_->set_current_block(NULL);
964  pending_merge_block_ = false;
965}
966
967
// Closes out the if-construct: synthesizes missing Then()/Else() calls so
// both arms exist as merge records, then marks the builder finished.
void HGraphBuilder::IfBuilder::Finish() {
  ASSERT(!finished_);
  if (!did_then_) {
    Then();
  }
  AddMergeAtJoinBlock(false);
  if (!did_else_) {
    Else();
    AddMergeAtJoinBlock(false);
  }
  finished_ = true;
}
980
981
// Like Finish(), but also reports the two merge blocks. The record list is
// newest-first, so the head is the else arm and its successor the then arm;
// the trailing ASSERT checks exactly two records exist. Either out-pointer
// may be NULL if the caller does not need that arm.
void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
                                      HBasicBlock** else_continuation) {
  Finish();

  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
  if (else_continuation != NULL) {
    *else_continuation = else_record->block_;
  }
  MergeAtJoinBlock* then_record = else_record->next_;
  if (then_continuation != NULL) {
    *then_continuation = then_record->block_;
  }
  ASSERT(then_record->next_ == NULL);
}
996
997
// Completes the if-construct by merging all recorded arm blocks into a
// single continuation block (or reusing the lone surviving block if only
// one arm reaches the end). No-op if the continuation was captured.
void HGraphBuilder::IfBuilder::End() {
  if (captured_) return;
  Finish();

  int total_merged_blocks = normal_merge_at_join_block_count_ +
    deopt_merge_at_join_block_count_;
  ASSERT(total_merged_blocks >= 1);
  HBasicBlock* merge_block = total_merged_blocks == 1
      ? NULL : builder_->graph()->CreateBasicBlock();

  // Merge non-deopt blocks first to ensure environment has right size for
  // padding.
  MergeAtJoinBlock* current = merge_at_join_blocks_;
  while (current != NULL) {
    if (!current->deopt_ && current->block_ != NULL) {
      // If there is only one block that makes it through to the end of the
      // if, then just set it as the current block and continue rather than
      // creating an unnecessary merge block.
      if (total_merged_blocks == 1) {
        builder_->set_current_block(current->block_);
        return;
      }
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }

  // Merge deopt blocks, padding when necessary.
  current = merge_at_join_blocks_;
  while (current != NULL) {
    if (current->deopt_ && current->block_ != NULL) {
      builder_->PadEnvironmentForContinuation(current->block_,
                                              merge_block);
      builder_->GotoNoSimulate(current->block_, merge_block);
    }
    current = current->next_;
  }
  builder_->set_current_block(merge_block);
}
1037
1038
1039HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1040                                        HValue* context,
1041                                        LoopBuilder::Direction direction)
1042    : builder_(builder),
1043      context_(context),
1044      direction_(direction),
1045      finished_(false) {
1046  header_block_ = builder->CreateLoopHeaderBlock();
1047  body_block_ = NULL;
1048  exit_block_ = NULL;
1049  exit_trampoline_block_ = NULL;
1050  increment_amount_ = builder_->graph()->GetConstant1();
1051}
1052
1053
1054HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder,
1055                                        HValue* context,
1056                                        LoopBuilder::Direction direction,
1057                                        HValue* increment_amount)
1058    : builder_(builder),
1059      context_(context),
1060      direction_(direction),
1061      finished_(false) {
1062  header_block_ = builder->CreateLoopHeaderBlock();
1063  body_block_ = NULL;
1064  exit_block_ = NULL;
1065  exit_trampoline_block_ = NULL;
1066  increment_amount_ = increment_amount;
1067}
1068
1069
// Emits the loop header: a phi for the induction variable starting at
// |initial|, a numeric compare of phi against |terminating| with |token|
// selecting body vs. exit, then positions the builder in the body block.
// Returns the value the body should use as the index: the incremented value
// for pre-increment/decrement loops, otherwise the phi itself.
HValue* HGraphBuilder::LoopBuilder::BeginBody(
    HValue* initial,
    HValue* terminating,
    Token::Value token) {
  HEnvironment* env = builder_->environment();
  phi_ = header_block_->AddNewPhi(env->values()->length());
  phi_->AddInput(initial);
  // Push the initial value so the phi is merged through the environment.
  env->Push(initial);
  builder_->GotoNoSimulate(header_block_);

  HEnvironment* body_env = env->Copy();
  HEnvironment* exit_env = env->Copy();
  // Remove the phi from the expression stack
  body_env->Pop();
  exit_env->Pop();
  body_block_ = builder_->CreateBasicBlock(body_env);
  exit_block_ = builder_->CreateBasicBlock(exit_env);

  builder_->set_current_block(header_block_);
  env->Pop();
  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
          phi_, terminating, token, body_block_, exit_block_));

  builder_->set_current_block(body_block_);
  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
    // Pre-increment/decrement: step by one before the body runs.
    HValue* one = builder_->graph()->GetConstant1();
    if (direction_ == kPreIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, one);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, one);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
    return increment_;
  } else {
    return phi_;
  }
}
1108
1109
// Emits a break out of the loop: jumps to a shared trampoline block that
// sits in front of the loop exit, creating the trampoline on first use.
void HGraphBuilder::LoopBuilder::Break() {
  if (exit_trampoline_block_ == NULL) {
    // It's the first time we saw a break.
    HEnvironment* env = exit_block_->last_environment()->Copy();
    exit_trampoline_block_ = builder_->CreateBasicBlock(env);
    builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
  }

  builder_->GotoNoSimulate(exit_trampoline_block_);
  // No current block: code after a break in the body is unreachable.
  builder_->set_current_block(NULL);
}
1121
1122
// Closes the loop body: applies the post-increment/decrement step, wires
// the back edge to the header, and leaves the builder positioned after the
// loop (in the break trampoline if any Break() occurred, else the exit).
void HGraphBuilder::LoopBuilder::EndBody() {
  ASSERT(!finished_);

  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
    if (direction_ == kPostIncrement) {
      increment_ = HAdd::New(zone(), context_, phi_, increment_amount_);
    } else {
      increment_ = HSub::New(zone(), context_, phi_, increment_amount_);
    }
    increment_->ClearFlag(HValue::kCanOverflow);
    builder_->AddInstruction(increment_);
  }

  // Push the new increment value on the expression stack to merge into the phi.
  builder_->environment()->Push(increment_);
  HBasicBlock* last_block = builder_->current_block();
  builder_->GotoNoSimulate(last_block, header_block_);
  header_block_->loop_information()->RegisterBackEdge(last_block);

  if (exit_trampoline_block_ != NULL) {
    builder_->set_current_block(exit_trampoline_block_);
  } else {
    builder_->set_current_block(exit_block_);
  }
  finished_ = true;
}
1149
1150
// Allocates the HGraph and drives BuildGraph() from the entry block.
// Returns NULL if graph building bails out.
HGraph* HGraphBuilder::CreateGraph() {
  graph_ = new(zone()) HGraph(info_);
  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
  CompilationPhase phase("H_Block building", info_);
  set_current_block(graph()->entry_block());
  if (!BuildGraph()) return NULL;
  graph()->FinalizeUniqueness();
  return graph_;
}
1160
1161
// Appends |instr| to the current block at the current source position and
// returns it. Inside a NoObservableSideEffectsScope the instruction is
// flagged as having no observable side effects.
HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
  ASSERT(current_block() != NULL);
  ASSERT(!FLAG_emit_opt_code_positions ||
         position_ != RelocInfo::kNoPosition || !info_->IsOptimizing());
  current_block()->AddInstruction(instr, position_);
  if (graph()->IsInsideNoSideEffectsScope()) {
    instr->SetFlag(HValue::kHasNoObservableSideEffects);
  }
  return instr;
}
1172
1173
1174void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
1175  ASSERT(!FLAG_emit_opt_code_positions || !info_->IsOptimizing() ||
1176         position_ != RelocInfo::kNoPosition);
1177  current_block()->Finish(last, position_);
1178  if (last->IsReturn() || last->IsAbnormalExit()) {
1179    set_current_block(NULL);
1180  }
1181}
1182
1183
1184void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1185  ASSERT(!FLAG_emit_opt_code_positions || !info_->IsOptimizing() ||
1186         position_ != RelocInfo::kNoPosition);
1187  current_block()->FinishExit(instruction, position_);
1188  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1189    set_current_block(NULL);
1190  }
1191}
1192
1193
// Emits code that increments the native |counter| by one, guarded by the
// native_code_counters flag and the counter being enabled at compile time.
void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    HValue* reference = Add<HConstant>(ExternalReference(counter));
    HValue* old_value = Add<HLoadNamedField>(reference,
                                             HObjectAccess::ForCounter());
    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
                          new_value);
  }
}
1205
1206
1207void HGraphBuilder::AddSimulate(BailoutId id,
1208                                RemovableSimulate removable) {
1209  ASSERT(current_block() != NULL);
1210  ASSERT(!graph()->IsInsideNoSideEffectsScope());
1211  current_block()->AddNewSimulate(id, removable);
1212}
1213
1214
1215HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1216  HBasicBlock* b = graph()->CreateBasicBlock();
1217  b->SetInitialEnvironment(env);
1218  return b;
1219}
1220
1221
1222HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1223  HBasicBlock* header = graph()->CreateBasicBlock();
1224  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1225  header->SetInitialEnvironment(entry_env);
1226  header->AttachLoopInformation();
1227  return header;
1228}
1229
1230
1231HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1232  if (obj->type().IsHeapObject()) return obj;
1233  return Add<HCheckHeapObject>(obj);
1234}
1235
1236
// Ends the current block with an eager deoptimize and a jump to
// |continuation|, padding the environment first so the join is consistent.
// The jump skips the simulate when inside a no-side-effects scope.
void HGraphBuilder::FinishExitWithHardDeoptimization(
    const char* reason, HBasicBlock* continuation) {
  PadEnvironmentForContinuation(current_block(), continuation);
  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
  if (graph()->IsInsideNoSideEffectsScope()) {
    GotoNoSimulate(continuation);
  } else {
    Goto(continuation);
  }
}
1247
1248
// Makes |from|'s environment length match |continuation|'s so the blocks
// can be joined. A continuation with no environment yet (no predecessors)
// needs no adjustment.
void HGraphBuilder::PadEnvironmentForContinuation(
    HBasicBlock* from,
    HBasicBlock* continuation) {
  if (continuation->last_environment() != NULL) {
    // When merging from a deopt block to a continuation, resolve differences in
    // environment by pushing constant 0 and popping extra values so that the
    // environments match during the join. Push 0 since it has the most specific
    // representation, and will not influence representation inference of the
    // phi.
    int continuation_env_length = continuation->last_environment()->length();
    while (continuation_env_length != from->last_environment()->length()) {
      if (continuation_env_length > from->last_environment()->length()) {
        from->last_environment()->Push(graph()->GetConstant0());
      } else {
        from->last_environment()->Pop();
      }
    }
  } else {
    ASSERT(continuation->predecessors()->length() == 0);
  }
}
1270
1271
1272HValue* HGraphBuilder::BuildCheckMap(HValue* obj, Handle<Map> map) {
1273  return Add<HCheckMaps>(obj, map, top_info());
1274}
1275
1276
1277HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1278  if (!string->type().IsString()) {
1279    ASSERT(!string->IsConstant() ||
1280           !HConstant::cast(string)->HasStringValue());
1281    BuildCheckHeapObject(string);
1282    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
1283  }
1284  return string;
1285}
1286
1287
1288HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* function) {
1289  if (object->type().IsJSObject()) return object;
1290  return Add<HWrapReceiver>(object, function);
1291}
1292
1293
// Emits the out-of-bounds store path for element stores: if |key| is past
// |length|, grows the backing store (deopting if the key jumps more than
// JSObject::kMaxGap past the current capacity) and, for JS arrays, bumps
// the array length; otherwise bounds-checks the in-range key. Returns the
// (possibly new) elements backing store via the environment stack.
HValue* HGraphBuilder::BuildCheckForCapacityGrow(HValue* object,
                                                 HValue* elements,
                                                 ElementsKind kind,
                                                 HValue* length,
                                                 HValue* key,
                                                 bool is_js_array) {
  IfBuilder length_checker(this);

  // Holey kinds allow stores anywhere past length; packed kinds only allow
  // appending exactly at length.
  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
  length_checker.If<HCompareNumericAndBranch>(key, length, token);

  length_checker.Then();

  HValue* current_capacity = AddLoadFixedArrayLength(elements);

  IfBuilder capacity_checker(this);

  capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
                                                Token::GTE);
  capacity_checker.Then();

  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
  HValue* max_capacity = AddUncasted<HAdd>(current_capacity, max_gap);
  IfBuilder key_checker(this);
  key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT);
  key_checker.Then();
  key_checker.ElseDeopt("Key out of capacity range");
  key_checker.End();

  HValue* new_capacity = BuildNewElementsCapacity(key);
  HValue* new_elements = BuildGrowElementsCapacity(object, elements,
                                                   kind, kind, length,
                                                   new_capacity);

  // Both arms of each IfBuilder push the surviving elements value so the
  // final Pop() below sees a single merged result.
  environment()->Push(new_elements);
  capacity_checker.Else();

  environment()->Push(elements);
  capacity_checker.End();

  if (is_js_array) {
    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
    new_length->ClearFlag(HValue::kCanOverflow);

    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
                          new_length);
  }

  length_checker.Else();
  Add<HBoundsCheck>(key, length);

  environment()->Push(elements);
  length_checker.End();

  return environment()->Pop();
}
1350
1351
// Copy-on-write support: if |elements| has the fixed COW array map, copies
// it into a fresh writable backing store of the same capacity; otherwise
// the original elements are kept. Returns the writable backing store via
// the environment stack.
HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
                                                HValue* elements,
                                                ElementsKind kind,
                                                HValue* length) {
  Factory* factory = isolate()->factory();

  IfBuilder cow_checker(this);

  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
  cow_checker.Then();

  HValue* capacity = AddLoadFixedArrayLength(elements);

  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
                                                   kind, length, capacity);

  environment()->Push(new_elements);

  cow_checker.Else();

  environment()->Push(elements);

  cow_checker.End();

  return environment()->Pop();
}
1378
1379
// Transitions |object|'s elements from |from_kind| to |to_kind| and
// installs |map|. Non-simple transitions (e.g. smi -> double) rewrite the
// backing store in place via BuildGrowElementsCapacity, skipping objects
// with the canonical empty fixed array. A trap for allocation mementos is
// emitted when the transition is tracked by allocation sites.
void HGraphBuilder::BuildTransitionElementsKind(HValue* object,
                                                HValue* map,
                                                ElementsKind from_kind,
                                                ElementsKind to_kind,
                                                bool is_jsarray) {
  // Transitioning from holey to packed is not allowed.
  ASSERT(!IsFastHoleyElementsKind(from_kind) ||
         IsFastHoleyElementsKind(to_kind));

  if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
    Add<HTrapAllocationMemento>(object);
  }

  if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
    HInstruction* elements = AddLoadElements(object);

    HInstruction* empty_fixed_array = Add<HConstant>(
        isolate()->factory()->empty_fixed_array());

    IfBuilder if_builder(this);

    if_builder.IfNot<HCompareObjectEqAndBranch>(elements, empty_fixed_array);

    if_builder.Then();

    HInstruction* elements_length = AddLoadFixedArrayLength(elements);

    // JS arrays track length separately from capacity; plain objects use
    // the backing store length for both.
    HInstruction* array_length = is_jsarray
        ? Add<HLoadNamedField>(object, HObjectAccess::ForArrayLength(from_kind))
        : elements_length;

    BuildGrowElementsCapacity(object, elements, from_kind, to_kind,
                              array_length, elements_length);

    if_builder.End();
  }

  // Install the new map last, after the backing store is consistent.
  Add<HStoreNamedField>(object, HObjectAccess::ForMap(), map);
}
1418
1419
// Emits one probe of the open-addressed number-dictionary lookup, recursing
// for up to kNumberDictionaryProbes probes. Each probe computes the entry
// index from |hash|, the probe offset, and |mask|, compares the stored key
// against |key|, and on a match with zero details loads the value. Deopts
// when the probes are exhausted or the matching entry is not a fast-case
// element. Returns NULL only at the recursion cutoff.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoadHelper(
    HValue* elements,
    HValue* key,
    HValue* hash,
    HValue* mask,
    int current_probe) {
  if (current_probe == kNumberDictionaryProbes) {
    return NULL;
  }

  int32_t offset = SeededNumberDictionary::GetProbeOffset(current_probe);
  HValue* raw_index = (current_probe == 0)
      ? hash
      : AddUncasted<HAdd>(hash, Add<HConstant>(offset));
  raw_index = AddUncasted<HBitwise>(Token::BIT_AND, raw_index, mask);
  int32_t entry_size = SeededNumberDictionary::kEntrySize;
  raw_index = AddUncasted<HMul>(raw_index, Add<HConstant>(entry_size));
  raw_index->ClearFlag(HValue::kCanOverflow);

  int32_t base_offset = SeededNumberDictionary::kElementsStartIndex;
  HValue* key_index = AddUncasted<HAdd>(raw_index, Add<HConstant>(base_offset));
  key_index->ClearFlag(HValue::kCanOverflow);

  HValue* candidate_key = Add<HLoadKeyed>(elements, key_index,
                                          static_cast<HValue*>(NULL),
                                          FAST_SMI_ELEMENTS);

  IfBuilder key_compare(this);
  key_compare.IfNot<HCompareObjectEqAndBranch>(key, candidate_key);
  key_compare.Then();
  {
    // Key at the current probe doesn't match, try at the next probe.
    HValue* result = BuildUncheckedDictionaryElementLoadHelper(
        elements, key, hash, mask, current_probe + 1);
    if (result == NULL) {
      key_compare.Deopt("probes exhausted in keyed load dictionary lookup");
      result = graph()->GetConstantUndefined();
    } else {
      Push(result);
    }
  }
  key_compare.Else();
  {
    // Key at current probe matches. Details must be zero, otherwise the
    // dictionary element requires special handling.
    HValue* details_index = AddUncasted<HAdd>(
        raw_index, Add<HConstant>(base_offset + 2));
    details_index->ClearFlag(HValue::kCanOverflow);

    HValue* details = Add<HLoadKeyed>(elements, details_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_SMI_ELEMENTS);
    IfBuilder details_compare(this);
    details_compare.If<HCompareNumericAndBranch>(details,
                                                 graph()->GetConstant0(),
                                                 Token::NE);
    details_compare.ThenDeopt("keyed load dictionary element not fast case");

    details_compare.Else();
    {
      // Key matches and details are zero --> fast case. Load and return the
      // value.
      HValue* result_index = AddUncasted<HAdd>(
          raw_index, Add<HConstant>(base_offset + 1));
      result_index->ClearFlag(HValue::kCanOverflow);

      Push(Add<HLoadKeyed>(elements, result_index,
                           static_cast<HValue*>(NULL),
                           FAST_ELEMENTS));
    }
    details_compare.End();
  }
  key_compare.End();

  // Every non-deopt path above pushed exactly one value.
  return Pop();
}
1496
1497
// Emits the integer-hash scramble of |index|, seeded with the heap's hash
// seed, used to pick the starting probe in seeded number dictionaries. The
// inline comments give the scalar equivalent of each emitted step.
HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
  HValue* seed = Add<HConstant>(seed_value);
  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);

  // hash = ~hash + (hash << 15);
  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
                                           graph()->GetConstantMinus1());
  hash = AddUncasted<HAdd>(shifted_hash, not_hash);

  // hash = hash ^ (hash >> 12);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash + (hash << 2);
  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
  hash = AddUncasted<HAdd>(hash, shifted_hash);

  // hash = hash ^ (hash >> 4);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);

  // hash = hash * 2057;
  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
  hash->ClearFlag(HValue::kCanOverflow);

  // hash = hash ^ (hash >> 16);
  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
}
1529
1530
// Emits a keyed load from |receiver|'s dictionary-mode elements: hashes the
// key, masks it by capacity - 1, and runs the probe sequence.
HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
                                                           HValue* key) {
  HValue* elements = AddLoadElements(receiver);

  HValue* hash = BuildElementIndexHash(key);

  // NOTE(review): NameDictionary::kCapacityIndex is used although the probe
  // helper is for SeededNumberDictionary — presumably both share the same
  // HashTable layout; confirm the indices agree.
  HValue* capacity = Add<HLoadKeyed>(
      elements,
      Add<HConstant>(NameDictionary::kCapacityIndex),
      static_cast<HValue*>(NULL),
      FAST_SMI_ELEMENTS);

  // Capacity is a power of two, so capacity - 1 is the probe mask.
  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
  mask->ChangeRepresentation(Representation::Integer32());
  mask->ClearFlag(HValue::kCanOverflow);

  return BuildUncheckedDictionaryElementLoadHelper(elements, key,
                                                   hash, mask, 0);
}
1550
1551
// Emits a number-to-string conversion for |object| (statically typed by
// |type|): constants fold at compile time, otherwise the number string
// cache is probed for smis and heap numbers, falling back to the
// NumberToStringSkipCache runtime function on a miss. Cache hits push the
// key index through the |found| continuation and load the adjacent value.
HValue* HGraphBuilder::BuildNumberToString(HValue* object,
                                           Handle<Type> type) {
  NoObservableSideEffectsScope scope(this);

  // Convert constant numbers at compile time.
  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
    Handle<Object> number = HConstant::cast(object)->handle(isolate());
    Handle<String> result = isolate()->factory()->NumberToString(number);
    return Add<HConstant>(result);
  }

  // Create a joinable continuation.
  HIfContinuation found(graph()->CreateBasicBlock(),
                        graph()->CreateBasicBlock());

  // Load the number string cache.
  HValue* number_string_cache =
      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
  mask->set_type(HType::Smi());
  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());

  // Check whether object is a smi.
  IfBuilder if_objectissmi(this);
  if_objectissmi.If<HIsSmiAndBranch>(object);
  if_objectissmi.Then();
  {
    // Compute hash for smi similar to smi_get_hash().
    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);

    // Load the key.
    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                  static_cast<HValue*>(NULL),
                                  FAST_ELEMENTS, ALLOW_RETURN_HOLE);

    // Check if object == key.
    IfBuilder if_objectiskey(this);
    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
    if_objectiskey.Then();
    {
      // Make the key_index available.
      Push(key_index);
    }
    if_objectiskey.JoinContinuation(&found);
  }
  if_objectissmi.Else();
  {
    if (type->Is(Type::Smi())) {
      // Statically a smi, so a non-smi here can only mean deopt.
      if_objectissmi.Deopt("Expected smi");
    } else {
      // Check if the object is a heap number.
      IfBuilder if_objectisnumber(this);
      if_objectisnumber.If<HCompareMap>(
          object, isolate()->factory()->heap_number_map());
      if_objectisnumber.Then();
      {
        // Compute hash for heap number similar to double_get_hash().
        HValue* low = Add<HLoadNamedField>(
            object, HObjectAccess::ForHeapNumberValueLowestBits());
        HValue* high = Add<HLoadNamedField>(
            object, HObjectAccess::ForHeapNumberValueHighestBits());
        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);

        // Load the key.
        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
        HValue* key = Add<HLoadKeyed>(number_string_cache, key_index,
                                      static_cast<HValue*>(NULL),
                                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);

        // Check if key is a heap number (the number string cache contains only
        // SMIs and heap number, so it is sufficient to do a SMI check here).
        IfBuilder if_keyisnotsmi(this);
        if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
        if_keyisnotsmi.Then();
        {
          // Check if values of key and object match.
          IfBuilder if_keyeqobject(this);
          if_keyeqobject.If<HCompareNumericAndBranch>(
              Add<HLoadNamedField>(key, HObjectAccess::ForHeapNumberValue()),
              Add<HLoadNamedField>(object, HObjectAccess::ForHeapNumberValue()),
              Token::EQ);
          if_keyeqobject.Then();
          {
            // Make the key_index available.
            Push(key_index);
          }
          if_keyeqobject.JoinContinuation(&found);
        }
        if_keyisnotsmi.JoinContinuation(&found);
      }
      if_objectisnumber.Else();
      {
        if (type->Is(Type::Number())) {
          if_objectisnumber.Deopt("Expected heap number");
        }
      }
      if_objectisnumber.JoinContinuation(&found);
    }
  }
  if_objectissmi.JoinContinuation(&found);

  // Check for cache hit.
  IfBuilder if_found(this, &found);
  if_found.Then();
  {
    // Count number to string operation in native code.
    AddIncrementCounter(isolate()->counters()->number_to_string_native());

    // Load the value in case of cache hit.
    HValue* key_index = Pop();
    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
    Push(Add<HLoadKeyed>(number_string_cache, value_index,
                         static_cast<HValue*>(NULL),
                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
  }
  if_found.Else();
  {
    // Cache miss, fallback to runtime.
    Add<HPushArgument>(object);
    Push(Add<HCallRuntime>(
            isolate()->factory()->empty_string(),
            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
            1));
  }
  if_found.End();

  return Pop();
}
1686
1687
// Computes the allocation size in bytes for a sequential string of
// |length| characters in |encoding|: chars (doubled for two-byte), plus
// header, rounded up to the object alignment.
HValue* HGraphBuilder::BuildSeqStringSizeFor(HValue* length,
                                             String::Encoding encoding) {
  STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
  HValue* size = length;
  if (encoding == String::TWO_BYTE_ENCODING) {
    // Two bytes per character.
    size = AddUncasted<HShl>(length, graph()->GetConstant1());
    size->ClearFlag(HValue::kCanOverflow);
    size->SetFlag(HValue::kUint32);
  }
  // Add the header and round up: (size + header + mask) & ~mask.
  size = AddUncasted<HAdd>(size, Add<HConstant>(static_cast<int32_t>(
              SeqString::kHeaderSize + kObjectAlignmentMask)));
  size->ClearFlag(HValue::kCanOverflow);
  size = AddUncasted<HBitwise>(
      Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
              ~kObjectAlignmentMask)));
  return size;
}
1705
1706
// Emits a character-copy loop from sequential string |src| (starting at
// |src_offset|) to |dst| (starting at |dst_offset|), |length| characters,
// converting between encodings as needed.
void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
                                            HValue* src_offset,
                                            String::Encoding src_encoding,
                                            HValue* dst,
                                            HValue* dst_offset,
                                            String::Encoding dst_encoding,
                                            HValue* length) {
  // Narrowing two-byte to one-byte is not supported.
  ASSERT(dst_encoding != String::ONE_BYTE_ENCODING ||
         src_encoding == String::ONE_BYTE_ENCODING);
  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
  {
    HValue* src_index = AddUncasted<HAdd>(src_offset, index);
    HValue* value =
        AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
    HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
    Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
  }
  loop.EndBody();
}
1727
1728
1729HValue* HGraphBuilder::BuildUncheckedStringAdd(HValue* left,
1730                                               HValue* right,
1731                                               PretenureFlag pretenure_flag) {
1732  // Determine the string lengths.
1733  HValue* left_length = Add<HLoadNamedField>(
1734      left, HObjectAccess::ForStringLength());
1735  HValue* right_length = Add<HLoadNamedField>(
1736      right, HObjectAccess::ForStringLength());
1737
1738  // Compute the combined string length. If the result is larger than the max
1739  // supported string length, we bailout to the runtime. This is done implicitly
1740  // when converting the result back to a smi in case the max string length
1741  // equals the max smi valie. Otherwise, for platforms with 32-bit smis, we do
1742  HValue* length = AddUncasted<HAdd>(left_length, right_length);
1743  STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
1744  if (String::kMaxLength != Smi::kMaxValue) {
1745    IfBuilder if_nooverflow(this);
1746    if_nooverflow.If<HCompareNumericAndBranch>(
1747        length, Add<HConstant>(String::kMaxLength), Token::LTE);
1748    if_nooverflow.Then();
1749    if_nooverflow.ElseDeopt("String length exceeds limit");
1750  }
1751
1752  // Determine the string instance types.
1753  HLoadNamedField* left_instance_type = Add<HLoadNamedField>(
1754      Add<HLoadNamedField>(left, HObjectAccess::ForMap()),
1755      HObjectAccess::ForMapInstanceType());
1756  HLoadNamedField* right_instance_type = Add<HLoadNamedField>(
1757      Add<HLoadNamedField>(right, HObjectAccess::ForMap()),
1758      HObjectAccess::ForMapInstanceType());
1759
1760  // Compute difference of instance types.
1761  HValue* xored_instance_types = AddUncasted<HBitwise>(
1762      Token::BIT_XOR, left_instance_type, right_instance_type);
1763
1764  // Check if we should create a cons string.
1765  IfBuilder if_createcons(this);
1766  if_createcons.If<HCompareNumericAndBranch>(
1767      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
1768  if_createcons.Then();
1769  {
1770    // Allocate the cons string object. HAllocate does not care whether we
1771    // pass CONS_STRING_TYPE or CONS_ASCII_STRING_TYPE here, so we just use
1772    // CONS_STRING_TYPE here. Below we decide whether the cons string is
1773    // one-byte or two-byte and set the appropriate map.
1774    HAllocate* string = Add<HAllocate>(Add<HConstant>(ConsString::kSize),
1775                                       HType::String(), pretenure_flag,
1776                                       CONS_STRING_TYPE);
1777
1778    // Compute the intersection of instance types.
1779    HValue* anded_instance_types = AddUncasted<HBitwise>(
1780        Token::BIT_AND, left_instance_type, right_instance_type);
1781
1782    // We create a one-byte cons string if
1783    // 1. both strings are one-byte, or
1784    // 2. at least one of the strings is two-byte, but happens to contain only
1785    //    one-byte characters.
1786    // To do this, we check
1787    // 1. if both strings are one-byte, or if the one-byte data hint is set in
1788    //    both strings, or
1789    // 2. if one of the strings has the one-byte data hint set and the other
1790    //    string is one-byte.
1791    IfBuilder if_onebyte(this);
1792    STATIC_ASSERT(kOneByteStringTag != 0);
1793    STATIC_ASSERT(kOneByteDataHintMask != 0);
1794    if_onebyte.If<HCompareNumericAndBranch>(
1795        AddUncasted<HBitwise>(
1796            Token::BIT_AND, anded_instance_types,
1797            Add<HConstant>(static_cast<int32_t>(
1798                    kStringEncodingMask | kOneByteDataHintMask))),
1799        graph()->GetConstant0(), Token::NE);
1800    if_onebyte.Or();
1801    STATIC_ASSERT(kOneByteStringTag != 0 &&
1802                  kOneByteDataHintTag != 0 &&
1803                  kOneByteDataHintTag != kOneByteStringTag);
1804    if_onebyte.If<HCompareNumericAndBranch>(
1805        AddUncasted<HBitwise>(
1806            Token::BIT_AND, xored_instance_types,
1807            Add<HConstant>(static_cast<int32_t>(
1808                    kOneByteStringTag | kOneByteDataHintTag))),
1809        Add<HConstant>(static_cast<int32_t>(
1810                kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
1811    if_onebyte.Then();
1812    {
1813      // We can safely skip the write barrier for storing the map here.
1814      Handle<Map> map = isolate()->factory()->cons_ascii_string_map();
1815      AddStoreMapConstantNoWriteBarrier(string, map);
1816    }
1817    if_onebyte.Else();
1818    {
1819      // We can safely skip the write barrier for storing the map here.
1820      Handle<Map> map = isolate()->factory()->cons_string_map();
1821      AddStoreMapConstantNoWriteBarrier(string, map);
1822    }
1823    if_onebyte.End();
1824
1825    // Initialize the cons string fields.
1826    Add<HStoreNamedField>(string, HObjectAccess::ForStringHashField(),
1827                          Add<HConstant>(String::kEmptyHashField));
1828    Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(), length);
1829    Add<HStoreNamedField>(string, HObjectAccess::ForConsStringFirst(), left);
1830    Add<HStoreNamedField>(string, HObjectAccess::ForConsStringSecond(),
1831                          right);
1832
1833    // Count the native string addition.
1834    AddIncrementCounter(isolate()->counters()->string_add_native());
1835
1836    // Cons string is result.
1837    Push(string);
1838  }
1839  if_createcons.Else();
1840  {
1841    // Compute union of instance types.
1842    HValue* ored_instance_types = AddUncasted<HBitwise>(
1843        Token::BIT_OR, left_instance_type, right_instance_type);
1844
1845    // Check if both strings have the same encoding and both are
1846    // sequential.
1847    IfBuilder if_sameencodingandsequential(this);
1848    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1849        AddUncasted<HBitwise>(
1850            Token::BIT_AND, xored_instance_types,
1851            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1852        graph()->GetConstant0(), Token::EQ);
1853    if_sameencodingandsequential.And();
1854    STATIC_ASSERT(kSeqStringTag == 0);
1855    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
1856        AddUncasted<HBitwise>(
1857            Token::BIT_AND, ored_instance_types,
1858            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
1859        graph()->GetConstant0(), Token::EQ);
1860    if_sameencodingandsequential.Then();
1861    {
1862      // Check if the result is a one-byte string.
1863      IfBuilder if_onebyte(this);
1864      STATIC_ASSERT(kOneByteStringTag != 0);
1865      if_onebyte.If<HCompareNumericAndBranch>(
1866          AddUncasted<HBitwise>(
1867              Token::BIT_AND, ored_instance_types,
1868              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
1869          graph()->GetConstant0(), Token::NE);
1870      if_onebyte.Then();
1871      {
1872        // Calculate the number of bytes needed for the characters in the
1873        // string while observing object alignment.
1874        HValue* size = BuildSeqStringSizeFor(
1875            length, String::ONE_BYTE_ENCODING);
1876
1877        // Allocate the ASCII string object.
1878        Handle<Map> map = isolate()->factory()->ascii_string_map();
1879        HAllocate* string = Add<HAllocate>(size, HType::String(),
1880                                           pretenure_flag, ASCII_STRING_TYPE);
1881        string->set_known_initial_map(map);
1882
1883        // We can safely skip the write barrier for storing map here.
1884        AddStoreMapConstantNoWriteBarrier(string, map);
1885
1886        // Length must be stored into the string before we copy characters to
1887        // make debug verification code happy.
1888        Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(),
1889                              length);
1890
1891        // Copy bytes from the left string.
1892        BuildCopySeqStringChars(
1893            left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
1894            string, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
1895            left_length);
1896
1897        // Copy bytes from the right string.
1898        BuildCopySeqStringChars(
1899            right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING,
1900            string, left_length, String::ONE_BYTE_ENCODING,
1901            right_length);
1902
1903        // Count the native string addition.
1904        AddIncrementCounter(isolate()->counters()->string_add_native());
1905
1906        // Return the string.
1907        Push(string);
1908      }
1909      if_onebyte.Else();
1910      {
1911        // Calculate the number of bytes needed for the characters in the
1912        // string while observing object alignment.
1913        HValue* size = BuildSeqStringSizeFor(
1914            length, String::TWO_BYTE_ENCODING);
1915
1916        // Allocate the two-byte string object.
1917        Handle<Map> map = isolate()->factory()->string_map();
1918        HAllocate* string = Add<HAllocate>(size, HType::String(),
1919                                           pretenure_flag, STRING_TYPE);
1920        string->set_known_initial_map(map);
1921
1922        // We can safely skip the write barrier for storing map here.
1923        AddStoreMapConstantNoWriteBarrier(string, map);
1924
1925        // Length must be stored into the string before we copy characters to
1926        // make debug verification code happy.
1927        Add<HStoreNamedField>(string, HObjectAccess::ForStringLength(),
1928                              length);
1929
1930        // Copy bytes from the left string.
1931        BuildCopySeqStringChars(
1932            left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
1933            string, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
1934            left_length);
1935
1936        // Copy bytes from the right string.
1937        BuildCopySeqStringChars(
1938            right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING,
1939            string, left_length, String::TWO_BYTE_ENCODING,
1940            right_length);
1941
1942        // Return the string.
1943        Push(string);
1944      }
1945      if_onebyte.End();
1946
1947      // Initialize the (common) string fields.
1948      HValue* string = Pop();
1949      Add<HStoreNamedField>(string, HObjectAccess::ForStringHashField(),
1950                            Add<HConstant>(String::kEmptyHashField));
1951
1952      // Count the native string addition.
1953      AddIncrementCounter(isolate()->counters()->string_add_native());
1954
1955      Push(string);
1956    }
1957    if_sameencodingandsequential.Else();
1958    {
1959      // Fallback to the runtime to add the two strings.
1960      Add<HPushArgument>(left);
1961      Add<HPushArgument>(right);
1962      Push(Add<HCallRuntime>(isolate()->factory()->empty_string(),
1963                             Runtime::FunctionForId(Runtime::kStringAdd),
1964                             2));
1965    }
1966    if_sameencodingandsequential.End();
1967  }
1968  if_createcons.End();
1969
1970  return Pop();
1971}
1972
1973
// Builds code to concatenate the strings |left| and |right|. Handles the
// trivial cases here: if either operand has length zero the other operand is
// returned directly (still counting the addition in the string_add_native
// counter). Only the non-empty/non-empty case is delegated to
// BuildUncheckedStringAdd.
HValue* HGraphBuilder::BuildStringAdd(HValue* left,
                                      HValue* right,
                                      PretenureFlag pretenure_flag) {
  // Determine the string lengths.
  HValue* left_length = Add<HLoadNamedField>(
      left, HObjectAccess::ForStringLength());
  HValue* right_length = Add<HLoadNamedField>(
      right, HObjectAccess::ForStringLength());

  // Check if left string is empty.
  IfBuilder if_leftisempty(this);
  if_leftisempty.If<HCompareNumericAndBranch>(
      left_length, graph()->GetConstant0(), Token::EQ);
  if_leftisempty.Then();
  {
    // Count the native string addition.
    AddIncrementCounter(isolate()->counters()->string_add_native());

    // Just return the right string.
    Push(right);
  }
  if_leftisempty.Else();
  {
    // Check if right string is empty.
    IfBuilder if_rightisempty(this);
    if_rightisempty.If<HCompareNumericAndBranch>(
        right_length, graph()->GetConstant0(), Token::EQ);
    if_rightisempty.Then();
    {
      // Count the native string addition.
      AddIncrementCounter(isolate()->counters()->string_add_native());

      // Just return the left string.
      Push(left);
    }
    if_rightisempty.Else();
    {
      // Concatenate the two non-empty strings.
      Push(BuildUncheckedStringAdd(left, right, pretenure_flag));
    }
    if_rightisempty.End();
  }
  if_leftisempty.End();

  // All branches above pushed exactly one result value.
  return Pop();
}
2020
2021
// Builds an element load (is_store == false, val == NULL) or store
// (is_store == true) for a monomorphic keyed access on |checked_object|,
// whose map is assumed to have been checked already. Covers external
// (typed) arrays as well as fast smi/object/double element kinds, including
// copy-on-write handling and out-of-bounds growth depending on |store_mode|.
HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
    HValue* checked_object,
    HValue* key,
    HValue* val,
    bool is_js_array,
    ElementsKind elements_kind,
    bool is_store,
    LoadKeyedHoleMode load_mode,
    KeyedAccessStoreMode store_mode) {
  ASSERT(!IsExternalArrayElementsKind(elements_kind) || !is_js_array);
  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
  // on a HElementsTransition instruction. The flag can also be removed if the
  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
  // ElementsKind transitions. Finally, the dependency can be removed for stores
  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
  // generated store code.
  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
      (elements_kind == FAST_ELEMENTS && is_store)) {
    checked_object->ClearGVNFlag(kDependsOnElementsKind);
  }

  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
  bool fast_elements = IsFastObjectElementsKind(elements_kind);
  HValue* elements = AddLoadElements(checked_object);
  // A store into fast (smi/object) elements must not write into a
  // copy-on-write backing store, so check for the fixed_array_map here
  // (COW arrays use the fixed_cow_array_map) unless the store mode says the
  // COW case is handled explicitly below.
  if (is_store && (fast_elements || fast_smi_only_elements) &&
      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
    HCheckMaps* check_cow_map = Add<HCheckMaps>(
        elements, isolate()->factory()->fixed_array_map(), top_info());
    check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
  }
  HInstruction* length = NULL;
  if (is_js_array) {
    // JSArrays carry their length on the object itself.
    length = Add<HLoadNamedField>(
        checked_object, HObjectAccess::ForArrayLength(elements_kind));
  } else {
    // Other receivers use the backing store's length.
    length = AddLoadFixedArrayLength(elements);
  }
  length->set_type(HType::Smi());
  HValue* checked_key = NULL;
  if (IsExternalArrayElementsKind(elements_kind)) {
    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
      // Out-of-bounds accesses are silently ignored: only perform the
      // element access when 0 <= key < length; negative keys deopt.
      NoObservableSideEffectsScope no_effects(this);
       HLoadExternalArrayPointer* external_elements =
           Add<HLoadExternalArrayPointer>(elements);
      IfBuilder length_checker(this);
      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
      length_checker.Then();
      IfBuilder negative_checker(this);
      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
          key, graph()->GetConstant0(), Token::GTE);
      negative_checker.Then();
      HInstruction* result = AddElementAccess(
          external_elements, key, val, bounds_check, elements_kind, is_store);
      negative_checker.ElseDeopt("Negative key encountered");
      negative_checker.End();
      length_checker.End();
      return result;
    } else {
      ASSERT(store_mode == STANDARD_STORE);
      checked_key = Add<HBoundsCheck>(key, length);
      HLoadExternalArrayPointer* external_elements =
          Add<HLoadExternalArrayPointer>(elements);
      return AddElementAccess(
          external_elements, checked_key, val,
          checked_object, elements_kind, is_store);
    }
  }
  ASSERT(fast_smi_only_elements ||
         fast_elements ||
         IsFastDoubleElementsKind(elements_kind));

  // In case val is stored into a fast smi array, assure that the value is a smi
  // before manipulating the backing store. Otherwise the actual store may
  // deopt, leaving the backing store in an invalid state.
  if (is_store && IsFastSmiElementsKind(elements_kind) &&
      !val->type().IsSmi()) {
    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
  }

  if (IsGrowStoreMode(store_mode)) {
    // The store may grow the backing store; the capacity check replaces the
    // bounds check in this case.
    NoObservableSideEffectsScope no_effects(this);
    elements = BuildCheckForCapacityGrow(checked_object, elements,
                                         elements_kind, length, key,
                                         is_js_array);
    checked_key = key;
  } else {
    checked_key = Add<HBoundsCheck>(key, length);

    if (is_store && (fast_elements || fast_smi_only_elements)) {
      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
        // Copy a COW backing store before writing into it.
        NoObservableSideEffectsScope no_effects(this);
        elements = BuildCopyElementsOnWrite(checked_object, elements,
                                            elements_kind, length);
      } else {
        HCheckMaps* check_cow_map = Add<HCheckMaps>(
            elements, isolate()->factory()->fixed_array_map(), top_info());
        check_cow_map->ClearGVNFlag(kDependsOnElementsKind);
      }
    }
  }
  return AddElementAccess(elements, checked_key, val, checked_object,
                          elements_kind, is_store, load_mode);
}
2125
2126
2127
// Allocates a JSArray with |length_argument| elements via |array_builder|.
// A constant smi length is handled directly (empty array for 0). Otherwise
// the length is bounds-checked against JSObject::kInitialMaxFastElementArray
// and a runtime check decides between an empty-length array with preallocated
// capacity and an array sized exactly to the requested length.
HValue* HGraphBuilder::BuildAllocateArrayFromLength(
    JSArrayBuilder* array_builder,
    HValue* length_argument) {
  if (length_argument->IsConstant() &&
      HConstant::cast(length_argument)->HasSmiValue()) {
    int array_length = HConstant::cast(length_argument)->Integer32Value();
    HValue* new_object = array_length == 0
        ? array_builder->AllocateEmptyArray()
        : array_builder->AllocateArray(length_argument, length_argument);
    return new_object;
  }

  HValue* constant_zero = graph()->GetConstant0();
  HConstant* max_alloc_length =
      Add<HConstant>(JSObject::kInitialMaxFastElementArray);
  // Deopt if the requested length exceeds the fast-elements limit.
  HInstruction* checked_length = Add<HBoundsCheck>(length_argument,
                                                   max_alloc_length);
  IfBuilder if_builder(this);
  if_builder.If<HCompareNumericAndBranch>(checked_length, constant_zero,
                                          Token::EQ);
  if_builder.Then();
  // Length 0: allocate the default preallocated capacity with length 0.
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  HConstant* initial_capacity_node = Add<HConstant>(initial_capacity);
  Push(initial_capacity_node);  // capacity
  Push(constant_zero);          // length
  if_builder.Else();
  if (!(top_info()->IsStub()) &&
      IsFastPackedElementsKind(array_builder->kind())) {
    // We'll come back later with better (holey) feedback.
    if_builder.Deopt("Holey array despite packed elements_kind feedback");
  } else {
    Push(checked_length);         // capacity
    Push(checked_length);         // length
  }
  if_builder.End();

  // Figure out total size
  HValue* length = Pop();
  HValue* capacity = Pop();
  return array_builder->AllocateArray(capacity, length);
}
2169
2170HValue* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2171                                             HValue* capacity) {
2172  int elements_size;
2173  InstanceType instance_type;
2174
2175  if (IsFastDoubleElementsKind(kind)) {
2176    elements_size = kDoubleSize;
2177    instance_type = FIXED_DOUBLE_ARRAY_TYPE;
2178  } else {
2179    elements_size = kPointerSize;
2180    instance_type = FIXED_ARRAY_TYPE;
2181  }
2182
2183  HConstant* elements_size_value = Add<HConstant>(elements_size);
2184  HValue* mul = AddUncasted<HMul>(capacity, elements_size_value);
2185  mul->ClearFlag(HValue::kCanOverflow);
2186
2187  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2188  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2189  total_size->ClearFlag(HValue::kCanOverflow);
2190
2191  return Add<HAllocate>(total_size, HType::JSArray(),
2192      isolate()->heap()->GetPretenureMode(), instance_type);
2193}
2194
2195
2196void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2197                                                  ElementsKind kind,
2198                                                  HValue* capacity) {
2199  Factory* factory = isolate()->factory();
2200  Handle<Map> map = IsFastDoubleElementsKind(kind)
2201      ? factory->fixed_double_array_map()
2202      : factory->fixed_array_map();
2203
2204  AddStoreMapConstant(elements, map);
2205  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2206                        capacity);
2207}
2208
2209
2210HValue* HGraphBuilder::BuildAllocateElementsAndInitializeElementsHeader(
2211    ElementsKind kind,
2212    HValue* capacity) {
2213  // The HForceRepresentation is to prevent possible deopt on int-smi
2214  // conversion after allocation but before the new object fields are set.
2215  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2216  HValue* new_elements = BuildAllocateElements(kind, capacity);
2217  BuildInitializeElementsHeader(new_elements, kind, capacity);
2218  return new_elements;
2219}
2220
2221
// Initializes the header of a freshly allocated JSArray: stores the map,
// an empty properties array, the length field, an optional
// AllocationMemento, and finally sets the elements pointer to an inner
// allocation placed directly after the array (and memento, if present).
// Returns that inner-allocated elements store.
HInnerAllocatedObject* HGraphBuilder::BuildJSArrayHeader(HValue* array,
    HValue* array_map,
    AllocationSiteMode mode,
    ElementsKind elements_kind,
    HValue* allocation_site_payload,
    HValue* length_field) {

  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);

  HConstant* empty_fixed_array =
    Add<HConstant>(isolate()->factory()->empty_fixed_array());

  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
  Add<HStoreNamedField>(array, access, empty_fixed_array);
  Add<HStoreNamedField>(array, HObjectAccess::ForArrayLength(elements_kind),
                        length_field);

  if (mode == TRACK_ALLOCATION_SITE) {
    // Place an AllocationMemento directly behind the JSArray proper.
    BuildCreateAllocationMemento(
        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
  }

  // The elements store begins after the array (and memento, if any).
  int elements_location = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    elements_location += AllocationMemento::kSize;
  }

  HInnerAllocatedObject* elements = Add<HInnerAllocatedObject>(
      array, Add<HConstant>(elements_location));
  Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(), elements);
  return elements;
}
2254
2255
2256HInstruction* HGraphBuilder::AddElementAccess(
2257    HValue* elements,
2258    HValue* checked_key,
2259    HValue* val,
2260    HValue* dependency,
2261    ElementsKind elements_kind,
2262    bool is_store,
2263    LoadKeyedHoleMode load_mode) {
2264  if (is_store) {
2265    ASSERT(val != NULL);
2266    if (elements_kind == EXTERNAL_PIXEL_ELEMENTS) {
2267      val = Add<HClampToUint8>(val);
2268    }
2269    return Add<HStoreKeyed>(elements, checked_key, val, elements_kind);
2270  }
2271
2272  ASSERT(!is_store);
2273  ASSERT(val == NULL);
2274  HLoadKeyed* load = Add<HLoadKeyed>(
2275      elements, checked_key, dependency, elements_kind, load_mode);
2276  if (FLAG_opt_safe_uint32_operations &&
2277      elements_kind == EXTERNAL_UNSIGNED_INT_ELEMENTS) {
2278    graph()->RecordUint32Instruction(load);
2279  }
2280  return load;
2281}
2282
2283
2284HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object) {
2285  return Add<HLoadNamedField>(object, HObjectAccess::ForElementsPointer());
2286}
2287
2288
2289HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(HValue* object) {
2290  return Add<HLoadNamedField>(object,
2291                              HObjectAccess::ForFixedArrayLength());
2292}
2293
2294
2295HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2296  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2297                                                graph_->GetConstant1());
2298
2299  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2300  new_capacity->ClearFlag(HValue::kCanOverflow);
2301
2302  HValue* min_growth = Add<HConstant>(16);
2303
2304  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2305  new_capacity->ClearFlag(HValue::kCanOverflow);
2306
2307  return new_capacity;
2308}
2309
2310
2311void HGraphBuilder::BuildNewSpaceArrayCheck(HValue* length, ElementsKind kind) {
2312  Heap* heap = isolate()->heap();
2313  int element_size = IsFastDoubleElementsKind(kind) ? kDoubleSize
2314                                                    : kPointerSize;
2315  int max_size = heap->MaxRegularSpaceAllocationSize() / element_size;
2316  max_size -= JSArray::kSize / element_size;
2317  HConstant* max_size_constant = Add<HConstant>(max_size);
2318  Add<HBoundsCheck>(length, max_size_constant);
2319}
2320
2321
2322HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2323                                                 HValue* elements,
2324                                                 ElementsKind kind,
2325                                                 ElementsKind new_kind,
2326                                                 HValue* length,
2327                                                 HValue* new_capacity) {
2328  BuildNewSpaceArrayCheck(new_capacity, new_kind);
2329
2330  HValue* new_elements = BuildAllocateElementsAndInitializeElementsHeader(
2331      new_kind, new_capacity);
2332
2333  BuildCopyElements(elements, kind,
2334                    new_elements, new_kind,
2335                    length, new_capacity);
2336
2337  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2338                        new_elements);
2339
2340  return new_elements;
2341}
2342
2343
// Fills elements[from..to) with the hole value (the_hole for tagged element
// kinds, hole NaN for double kinds). Small constant-length fills are
// unrolled into straight-line stores; everything else uses a loop.
void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
                                              ElementsKind elements_kind,
                                              HValue* from,
                                              HValue* to) {
  // Fast elements kinds need to be initialized in case statements below cause
  // a garbage collection.
  Factory* factory = isolate()->factory();

  double nan_double = FixedDoubleArray::hole_nan_as_double();
  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
      ? Add<HConstant>(factory->the_hole_value())
      : Add<HConstant>(nan_double);

  // Special loop unfolding case
  static const int kLoopUnfoldLimit = 8;
  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <= kLoopUnfoldLimit);
  // -1 acts as a sentinel meaning "use the loop below".
  int initial_capacity = -1;
  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
    int constant_from = from->GetInteger32Constant();
    int constant_to = to->GetInteger32Constant();

    if (constant_from == 0 && constant_to <= kLoopUnfoldLimit) {
      initial_capacity = constant_to;
    }
  }

  // Since we're about to store a hole value, the store instruction below must
  // assume an elements kind that supports heap object values.
  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
    elements_kind = FAST_HOLEY_ELEMENTS;
  }

  if (initial_capacity >= 0) {
    // Unrolled fill: one keyed store per index.
    for (int i = 0; i < initial_capacity; i++) {
      HInstruction* key = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, key, hole, elements_kind);
    }
  } else {
    // General case: emit a fill loop over [from, to).
    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

    HValue* key = builder.BeginBody(from, to, Token::LT);

    Add<HStoreKeyed>(elements, key, hole, elements_kind);

    builder.EndBody();
  }
}
2391
2392
// Copies |length| elements from |from_elements| to |to_elements|, converting
// the hole representation where the source and destination element kinds
// differ (hole NaN vs. the_hole object). If the destination capacity exceeds
// the copied length, the unused tail is filled with holes.
void HGraphBuilder::BuildCopyElements(HValue* from_elements,
                                      ElementsKind from_elements_kind,
                                      HValue* to_elements,
                                      ElementsKind to_elements_kind,
                                      HValue* length,
                                      HValue* capacity) {
  // double -> tagged copies may allocate HeapNumbers and thus trigger GC.
  bool pre_fill_with_holes =
      IsFastDoubleElementsKind(from_elements_kind) &&
      IsFastObjectElementsKind(to_elements_kind);

  if (pre_fill_with_holes) {
    // If the copy might trigger a GC, make sure that the FixedArray is
    // pre-initialized with holes to make sure that it's always in a consistent
    // state.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              graph()->GetConstant0(), capacity);
  }

  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);

  HValue* key = builder.BeginBody(graph()->GetConstant0(), length, Token::LT);

  // ALLOW_RETURN_HOLE: the source may legitimately contain holes; they are
  // handled explicitly below rather than triggering a deopt.
  HValue* element = Add<HLoadKeyed>(from_elements, key,
                                    static_cast<HValue*>(NULL),
                                    from_elements_kind,
                                    ALLOW_RETURN_HOLE);

  // Storing a hole into a smi array is not possible; use holey-object
  // semantics for the store in that case.
  ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
                       IsFastSmiElementsKind(to_elements_kind))
      ? FAST_HOLEY_ELEMENTS : to_elements_kind;

  if (IsHoleyElementsKind(from_elements_kind) &&
      from_elements_kind != to_elements_kind) {
    // Kind changes: translate the hole into the destination's representation.
    IfBuilder if_hole(this);
    if_hole.If<HCompareHoleAndBranch>(element);
    if_hole.Then();
    HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
        ? Add<HConstant>(FixedDoubleArray::hole_nan_as_double())
        : graph()->GetConstantHole();
    Add<HStoreKeyed>(to_elements, key, hole_constant, kind);
    if_hole.Else();
    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
    if_hole.End();
  } else {
    HStoreKeyed* store = Add<HStoreKeyed>(to_elements, key, element, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
  }

  builder.EndBody();

  if (!pre_fill_with_holes && length != capacity) {
    // Fill unused capacity with the hole.
    BuildFillElementsWithHole(to_elements, to_elements_kind,
                              key, capacity);
  }
}
2450
2451
// Builds a shallow clone of the JSArray |boilerplate|: copies the JSArray
// header fields, optionally installs an AllocationMemento pointing at
// |allocation_site|, and — when |length| > 0 — allocates a fresh backing
// store whose header and first |length| elements are copied element by
// element from the boilerplate. Returns the cloned array.
HValue* HGraphBuilder::BuildCloneShallowArray(HValue* boilerplate,
                                              HValue* allocation_site,
                                              AllocationSiteMode mode,
                                              ElementsKind kind,
                                              int length) {
  NoObservableSideEffectsScope no_effects(this);

  // All sizes here are multiples of kPointerSize.
  int size = JSArray::kSize;
  if (mode == TRACK_ALLOCATION_SITE) {
    size += AllocationMemento::kSize;
  }

  HValue* size_in_bytes = Add<HConstant>(size);
  HInstruction* object = Add<HAllocate>(size_in_bytes,
                                        HType::JSObject(),
                                        NOT_TENURED,
                                        JS_OBJECT_TYPE);

  // Copy the JS array part.
  // The elements pointer is skipped when a fresh backing store is created
  // below (length > 0); for length == 0 the boilerplate's (empty) elements
  // pointer is copied as-is.
  for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
    if ((i != JSArray::kElementsOffset) || (length == 0)) {
      HObjectAccess access = HObjectAccess::ForJSArrayOffset(i);
      Add<HStoreNamedField>(object, access,
                            Add<HLoadNamedField>(boilerplate, access));
    }
  }

  // Create an allocation site info if requested.
  if (mode == TRACK_ALLOCATION_SITE) {
    BuildCreateAllocationMemento(
        object, Add<HConstant>(JSArray::kSize), allocation_site);
  }

  if (length > 0) {
    HValue* boilerplate_elements = AddLoadElements(boilerplate);
    HValue* object_elements;
    if (IsFastDoubleElementsKind(kind)) {
      HValue* elems_size = Add<HConstant>(FixedDoubleArray::SizeFor(length));
      object_elements = Add<HAllocate>(elems_size, HType::JSArray(),
          NOT_TENURED, FIXED_DOUBLE_ARRAY_TYPE);
    } else {
      HValue* elems_size = Add<HConstant>(FixedArray::SizeFor(length));
      object_elements = Add<HAllocate>(elems_size, HType::JSArray(),
          NOT_TENURED, FIXED_ARRAY_TYPE);
    }
    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
                          object_elements);

    // Copy the elements array header.
    for (int i = 0; i < FixedArrayBase::kHeaderSize; i += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForFixedArrayHeader(i);
      Add<HStoreNamedField>(object_elements, access,
                            Add<HLoadNamedField>(boilerplate_elements, access));
    }

    // Copy the elements array contents.
    // TODO(mstarzinger): Teach HGraphBuilder::BuildCopyElements to unfold
    // copying loops with constant length up to a given boundary and use this
    // helper here instead.
    for (int i = 0; i < length; i++) {
      HValue* key_constant = Add<HConstant>(i);
      HInstruction* value = Add<HLoadKeyed>(boilerplate_elements, key_constant,
                                            static_cast<HValue*>(NULL), kind);
      Add<HStoreKeyed>(object_elements, key_constant, value, kind);
    }
  }

  return object;
}
2522
2523
// Emits a test of |value| against the nil-like cases admitted by |type|
// (null, undefined, undetectable objects), OR-ing together the cases that
// |type| can actually be.  The true/false blocks are handed back through
// |continuation| instead of being joined here, so the caller decides how to
// consume them.
void HGraphBuilder::BuildCompareNil(
    HValue* value,
    Handle<Type> type,
    HIfContinuation* continuation) {
  IfBuilder if_nil(this);
  // some_case_handled: at least one nil-like case was emitted (controls
  // whether the next case needs an Or()).  some_case_missing: |type| ruled
  // out at least one case, so the else-branch below must check the map.
  bool some_case_handled = false;
  bool some_case_missing = false;

  if (type->Maybe(Type::Null())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value, graph()->GetConstantNull());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undefined())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HCompareObjectEqAndBranch>(value,
                                         graph()->GetConstantUndefined());
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (type->Maybe(Type::Undetectable())) {
    if (some_case_handled) if_nil.Or();
    if_nil.If<HIsUndetectableAndBranch>(value);
    some_case_handled = true;
  } else {
    some_case_missing = true;
  }

  if (some_case_missing) {
    if_nil.Then();
    if_nil.Else();
    if (type->NumClasses() == 1) {
      BuildCheckHeapObject(value);
      // For ICs, the map checked below is a sentinel map that gets replaced by
      // the monomorphic map when the code is used as a template to generate a
      // new IC. For optimized functions, there is no sentinel map, the map
      // emitted below is the actual monomorphic map.
      BuildCheckMap(value, type->Classes().Current());
    } else {
      if_nil.Deopt("Too many undetectable types");
    }
  }

  if_nil.CaptureContinuation(continuation);
}
2574
2575
// Emits initialization of an AllocationMemento located inside an already
// performed allocation, at |previous_object| + |previous_object_size|: the
// memento map, the back-pointer to |allocation_site|, and — when allocation
// site pretenuring is enabled — a bump of the site's memento-create counter.
void HGraphBuilder::BuildCreateAllocationMemento(
    HValue* previous_object,
    HValue* previous_object_size,
    HValue* allocation_site) {
  ASSERT(allocation_site != NULL);
  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
      previous_object, previous_object_size);
  AddStoreMapConstant(
      allocation_memento, isolate()->factory()->allocation_memento_map());
  Add<HStoreNamedField>(
      allocation_memento,
      HObjectAccess::ForAllocationMementoSite(),
      allocation_site);
  if (FLAG_allocation_site_pretenuring) {
    // Read-modify-write of the site's create count: load, add one, store.
    HValue* memento_create_count = Add<HLoadNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kMementoCreateCountOffset));
    memento_create_count = AddUncasted<HAdd>(
        memento_create_count, graph()->GetConstant1());
    HStoreNamedField* store = Add<HStoreNamedField>(
        allocation_site, HObjectAccess::ForAllocationSiteOffset(
            AllocationSite::kMementoCreateCountOffset), memento_create_count);
    // No write barrier needed to store a smi.
    store->SkipWriteBarrier();
  }
}
2602
2603
2604HInstruction* HGraphBuilder::BuildGetNativeContext() {
2605  // Get the global context, then the native context
2606  HInstruction* global_object = Add<HGlobalObject>();
2607  HObjectAccess access = HObjectAccess::ForJSObjectOffset(
2608      GlobalObject::kNativeContextOffset);
2609  return Add<HLoadNamedField>(global_object, access);
2610}
2611
2612
2613HInstruction* HGraphBuilder::BuildGetArrayFunction() {
2614  HInstruction* native_context = BuildGetNativeContext();
2615  HInstruction* index =
2616      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
2617  return Add<HLoadKeyed>(
2618      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
2619}
2620
2621
2622HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
2623    ElementsKind kind,
2624    HValue* allocation_site_payload,
2625    HValue* constructor_function,
2626    AllocationSiteOverrideMode override_mode) :
2627        builder_(builder),
2628        kind_(kind),
2629        allocation_site_payload_(allocation_site_payload),
2630        constructor_function_(constructor_function) {
2631  mode_ = override_mode == DISABLE_ALLOCATION_SITES
2632      ? DONT_TRACK_ALLOCATION_SITE
2633      : AllocationSite::GetMode(kind);
2634}
2635
2636
// Builder for JS arrays with no allocation site available; allocation-site
// tracking is therefore unconditionally disabled.
HGraphBuilder::JSArrayBuilder::JSArrayBuilder(HGraphBuilder* builder,
                                              ElementsKind kind,
                                              HValue* constructor_function) :
    builder_(builder),
    kind_(kind),
    mode_(DONT_TRACK_ALLOCATION_SITE),
    allocation_site_payload_(NULL),
    constructor_function_(constructor_function) {
}
2646
2647
// Produces the map for the array under construction.  Outside of stub
// compilation the initial JS array map for kind_ is embedded as a constant.
// Inside stubs the map must be fetched at runtime: either straight off the
// constructor function (when kind_ is the initial kind, so the initial map
// is the right one) or from the JS_ARRAY_MAPS table in the native context,
// indexed by kind_.
HValue* HGraphBuilder::JSArrayBuilder::EmitMapCode() {
  if (!builder()->top_info()->IsStub()) {
    // A constant map is fine.
    Handle<Map> map(builder()->isolate()->get_initial_js_array_map(kind_),
                    builder()->isolate());
    return builder()->Add<HConstant>(map);
  }

  if (constructor_function_ != NULL && kind_ == GetInitialFastElementsKind()) {
    // No need for a context lookup if the kind_ matches the initial
    // map, because we can just load the map in that case.
    HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
    return builder()->AddLoadNamedField(constructor_function_, access);
  }

  // Two keyed loads: native_context[JS_ARRAY_MAPS_INDEX][kind_].
  HInstruction* native_context = builder()->BuildGetNativeContext();
  HInstruction* index = builder()->Add<HConstant>(
      static_cast<int32_t>(Context::JS_ARRAY_MAPS_INDEX));

  HInstruction* map_array = builder()->Add<HLoadKeyed>(
      native_context, index, static_cast<HValue*>(NULL), FAST_ELEMENTS);

  HInstruction* kind_index = builder()->Add<HConstant>(kind_);

  return builder()->Add<HLoadKeyed>(
      map_array, kind_index, static_cast<HValue*>(NULL), FAST_ELEMENTS);
}
2675
2676
2677HValue* HGraphBuilder::JSArrayBuilder::EmitInternalMapCode() {
2678  // Find the map near the constructor function
2679  HObjectAccess access = HObjectAccess::ForPrototypeOrInitialMap();
2680  return builder()->AddLoadNamedField(constructor_function_, access);
2681}
2682
2683
// Emits the size-in-bytes computation for an array with a runtime element
// count |length_node|: JSArray header (plus memento when tracked) plus the
// FixedArray header, plus length * element size.  The final add is marked
// non-overflowing.
HValue* HGraphBuilder::JSArrayBuilder::EstablishAllocationSize(
    HValue* length_node) {
  ASSERT(length_node != NULL);

  int base_size = JSArray::kSize;
  if (mode_ == TRACK_ALLOCATION_SITE) {
    base_size += AllocationMemento::kSize;
  }

  // Double and tagged backing stores share the same header size, so a
  // single base_size works for both representations.
  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
  base_size += FixedArray::kHeaderSize;

  HInstruction* elements_size_value =
      builder()->Add<HConstant>(elements_size());
  HInstruction* mul = HMul::NewImul(builder()->zone(), builder()->context(),
                                    length_node, elements_size_value);
  builder()->AddInstruction(mul);
  HInstruction* base = builder()->Add<HConstant>(base_size);
  HInstruction* total_size = HAdd::New(builder()->zone(), builder()->context(),
                                       base, mul);
  total_size->ClearFlag(HValue::kCanOverflow);
  builder()->AddInstruction(total_size);
  return total_size;
}
2708
2709
2710HValue* HGraphBuilder::JSArrayBuilder::EstablishEmptyArrayAllocationSize() {
2711  int base_size = JSArray::kSize;
2712  if (mode_ == TRACK_ALLOCATION_SITE) {
2713    base_size += AllocationMemento::kSize;
2714  }
2715
2716  base_size += IsFastDoubleElementsKind(kind_)
2717      ? FixedDoubleArray::SizeFor(initial_capacity())
2718      : FixedArray::SizeFor(initial_capacity());
2719
2720  return builder()->Add<HConstant>(base_size);
2721}
2722
2723
2724HValue* HGraphBuilder::JSArrayBuilder::AllocateEmptyArray() {
2725  HValue* size_in_bytes = EstablishEmptyArrayAllocationSize();
2726  HConstant* capacity = builder()->Add<HConstant>(initial_capacity());
2727  return AllocateArray(size_in_bytes,
2728                       capacity,
2729                       builder()->graph()->GetConstant0());
2730}
2731
2732
2733HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* capacity,
2734                                                     HValue* length_field,
2735                                                     FillMode fill_mode) {
2736  HValue* size_in_bytes = EstablishAllocationSize(capacity);
2737  return AllocateArray(size_in_bytes, capacity, length_field, fill_mode);
2738}
2739
2740
// Allocates a JSArray of |size_in_bytes| and initializes its header, map,
// allocation memento (when tracked), and elements header; optionally fills
// the elements with the hole.  Records the elements location in
// elements_location_ and returns the new array.
HValue* HGraphBuilder::JSArrayBuilder::AllocateArray(HValue* size_in_bytes,
                                                     HValue* capacity,
                                                     HValue* length_field,
                                                     FillMode fill_mode) {
  // These HForceRepresentations are because we store these as fields in the
  // objects we construct, and an int32-to-smi HChange could deopt. Accept
  // the deopt possibility now, before allocation occurs.
  capacity =
      builder()->AddUncasted<HForceRepresentation>(capacity,
                                                   Representation::Smi());
  length_field =
      builder()->AddUncasted<HForceRepresentation>(length_field,
                                                   Representation::Smi());
  // Allocate (dealing with failure appropriately)
  HAllocate* new_object = builder()->Add<HAllocate>(size_in_bytes,
      HType::JSArray(), NOT_TENURED, JS_ARRAY_TYPE);

  // Folded array allocation should be aligned if it has fast double elements.
  if (IsFastDoubleElementsKind(kind_)) {
     new_object->MakeDoubleAligned();
  }

  // Fill in the fields: map, properties, length
  HValue* map;
  if (allocation_site_payload_ == NULL) {
    map = EmitInternalMapCode();
  } else {
    map = EmitMapCode();
  }
  elements_location_ = builder()->BuildJSArrayHeader(new_object,
                                                     map,
                                                     mode_,
                                                     kind_,
                                                     allocation_site_payload_,
                                                     length_field);

  // Initialize the elements
  builder()->BuildInitializeElementsHeader(elements_location_, kind_, capacity);

  if (fill_mode == FILL_WITH_HOLE) {
    builder()->BuildFillElementsWithHole(elements_location_, kind_,
                                         graph()->GetConstant0(), capacity);
  }

  return new_object;
}
2787
2788
2789HStoreNamedField* HGraphBuilder::AddStoreMapConstant(HValue *object,
2790                                                     Handle<Map> map) {
2791  return Add<HStoreNamedField>(object, HObjectAccess::ForMap(),
2792                               Add<HConstant>(map));
2793}
2794
2795
2796HValue* HGraphBuilder::AddLoadJSBuiltin(Builtins::JavaScript builtin) {
2797  HGlobalObject* global_object = Add<HGlobalObject>();
2798  HObjectAccess access = HObjectAccess::ForJSObjectOffset(
2799      GlobalObject::kBuiltinsOffset);
2800  HValue* builtins = Add<HLoadNamedField>(global_object, access);
2801  HObjectAccess function_access = HObjectAccess::ForJSObjectOffset(
2802      JSBuiltinsObject::OffsetOfFunctionWithId(builtin));
2803  return Add<HLoadNamedField>(builtins, function_access);
2804}
2805
2806
// Sets up the builder state for compiling |info|, including the initial
// (outermost) FunctionState and the OSR helper.
HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info)
    : HGraphBuilder(info),
      function_state_(NULL),
      initial_function_state_(this, info, NORMAL_RETURN),
      ast_context_(NULL),
      break_scope_(NULL),
      inlined_count_(0),
      globals_(10, info->zone()),
      inline_bailout_(false),
      osr_(new(info->zone()) HOsrBuilder(this)) {
  // This is not initialized in the initializer list because the
  // constructor for the initial state relies on function_state_ == NULL
  // to know it's the initial state.
  function_state_= &initial_function_state_;
  InitializeAstVisitor(info->isolate());
  if (FLAG_emit_opt_code_positions) {
    SetSourcePosition(info->shared_info()->start_position());
  }
}
2826
2827
2828HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
2829                                                HBasicBlock* second,
2830                                                BailoutId join_id) {
2831  if (first == NULL) {
2832    return second;
2833  } else if (second == NULL) {
2834    return first;
2835  } else {
2836    HBasicBlock* join_block = graph()->CreateBasicBlock();
2837    Goto(first, join_block);
2838    Goto(second, join_block);
2839    join_block->SetJoinId(join_id);
2840    return join_block;
2841  }
2842}
2843
2844
2845HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
2846                                                  HBasicBlock* exit_block,
2847                                                  HBasicBlock* continue_block) {
2848  if (continue_block != NULL) {
2849    if (exit_block != NULL) Goto(exit_block, continue_block);
2850    continue_block->SetJoinId(statement->ContinueId());
2851    return continue_block;
2852  }
2853  return exit_block;
2854}
2855
2856
2857HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
2858                                                HBasicBlock* loop_entry,
2859                                                HBasicBlock* body_exit,
2860                                                HBasicBlock* loop_successor,
2861                                                HBasicBlock* break_block) {
2862  if (body_exit != NULL) Goto(body_exit, loop_entry);
2863  loop_entry->PostProcessLoopHeader(statement);
2864  if (break_block != NULL) {
2865    if (loop_successor != NULL) Goto(loop_successor, break_block);
2866    break_block->SetJoinId(statement->ExitId());
2867    return break_block;
2868  }
2869  return loop_successor;
2870}
2871
2872
2873// Build a new loop header block and set it as the current block.
2874HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
2875  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
2876  Goto(loop_entry);
2877  set_current_block(loop_entry);
2878  return loop_entry;
2879}
2880
2881
2882HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
2883    IterationStatement* statement) {
2884  HBasicBlock* loop_entry = osr()->HasOsrEntryAt(statement)
2885      ? osr()->BuildOsrLoopEntry(statement)
2886      : BuildLoopEntry();
2887  return loop_entry;
2888}
2889
2890
// Terminates this block with |instruction| and drops its environment, since
// an exit block passes no state to successors.
void HBasicBlock::FinishExit(HControlInstruction* instruction, int position) {
  Finish(instruction, position);
  ClearEnvironment();
}
2895
2896
// Constructs an empty graph with a single entry block.  Stub compilations
// get a bare environment sized from the stub's interface descriptor; normal
// function compilations get a full environment built from the scope and
// closure.
HGraph::HGraph(CompilationInfo* info)
    : isolate_(info->isolate()),
      next_block_id_(0),
      entry_block_(NULL),
      blocks_(8, info->zone()),
      values_(16, info->zone()),
      phi_list_(NULL),
      uint32_instructions_(NULL),
      osr_(NULL),
      info_(info),
      zone_(info->zone()),
      is_recursive_(false),
      use_optimistic_licm_(false),
      depends_on_empty_array_proto_elements_(false),
      type_change_checksum_(0),
      maximum_environment_size_(0),
      no_side_effects_scope_count_(0),
      disallow_adding_new_values_(false) {
  if (info->IsStub()) {
    HydrogenCodeStub* stub = info->code_stub();
    CodeStubInterfaceDescriptor* descriptor =
        stub->GetInterfaceDescriptor(isolate_);
    start_environment_ =
        new(zone_) HEnvironment(zone_, descriptor->environment_length());
  } else {
    start_environment_ =
        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
  }
  start_environment_->set_ast_id(BailoutId::FunctionEntry());
  entry_block_ = CreateBasicBlock();
  entry_block_->SetInitialEnvironment(start_environment_);
}
2929
2930
2931HBasicBlock* HGraph::CreateBasicBlock() {
2932  HBasicBlock* result = new(zone()) HBasicBlock(this);
2933  blocks_.Add(result, zone());
2934  return result;
2935}
2936
2937
2938void HGraph::FinalizeUniqueness() {
2939  DisallowHeapAllocation no_gc;
2940  ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
2941  for (int i = 0; i < blocks()->length(); ++i) {
2942    for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
2943      it.Current()->FinalizeUniqueness();
2944    }
2945  }
2946}
2947
2948
2949// Block ordering was implemented with two mutually recursive methods,
2950// HGraph::Postorder and HGraph::PostorderLoopBlocks.
2951// The recursion could lead to stack overflow so the algorithm has been
2952// implemented iteratively.
2953// At a high level the algorithm looks like this:
2954//
2955// Postorder(block, loop_header) : {
2956//   if (block has already been visited or is of another loop) return;
2957//   mark block as visited;
2958//   if (block is a loop header) {
2959//     VisitLoopMembers(block, loop_header);
2960//     VisitSuccessorsOfLoopHeader(block);
2961//   } else {
2962//     VisitSuccessors(block)
2963//   }
2964//   put block in result list;
2965// }
2966//
2967// VisitLoopMembers(block, outer_loop_header) {
2968//   foreach (block b in block loop members) {
2969//     VisitSuccessorsOfLoopMember(b, outer_loop_header);
2970//     if (b is loop header) VisitLoopMembers(b);
2971//   }
2972// }
2973//
2974// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
2975//   foreach (block b in block successors) Postorder(b, outer_loop_header)
2976// }
2977//
2978// VisitSuccessorsOfLoopHeader(block) {
2979//   foreach (block b in block successors) Postorder(b, block)
2980// }
2981//
2982// VisitSuccessors(block, loop_header) {
2983//   foreach (block b in block successors) Postorder(b, loop_header)
2984// }
2985//
// The ordering is started by calling Postorder(entry, NULL).
2987//
2988// Each instance of PostorderProcessor represents the "stack frame" of the
2989// recursion, and particularly keeps the state of the loop (iteration) of the
2990// "Visit..." function it represents.
2991// To recycle memory we keep all the frames in a double linked list but
2992// this means that we cannot use constructors to initialize the frames.
2993//
// Iterative "stack frame" for the mutually recursive post-order walk
// described in the comment above.  Frames form a doubly linked list via
// father_/child_ and are recycled, so state is (re)initialized through the
// Setup* methods instead of constructors.
class PostorderProcessor : public ZoneObject {
 public:
  // Back link (towards the stack bottom).
  PostorderProcessor* parent() {return father_; }
  // Forward link (towards the stack top).
  PostorderProcessor* child() {return child_; }
  HBasicBlock* block() { return block_; }
  HLoopInformation* loop() { return loop_; }
  HBasicBlock* loop_header() { return loop_header_; }

  // Creates the bottom frame and starts the walk at |block| (the graph
  // entry), which has no enclosing loop header.
  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
                                                  HBasicBlock* block,
                                                  BitVector* visited) {
    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
    return result->SetupSuccessors(zone, block, NULL, visited);
  }

  // Performs one step of the walk; returns the frame to continue from, or
  // NULL when the traversal is complete.
  PostorderProcessor* PerformStep(Zone* zone,
                                  BitVector* visited,
                                  ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* next =
        PerformNonBacktrackingStep(zone, visited, order);
    if (next != NULL) {
      return next;
    } else {
      return Backtrack(zone, visited, order);
    }
  }

 private:
  explicit PostorderProcessor(PostorderProcessor* father)
      : father_(father), child_(NULL), successor_iterator(NULL) { }

  // Each enum value states the cycle whose state is kept by this instance.
  enum LoopKind {
    NONE,
    SUCCESSORS,
    SUCCESSORS_OF_LOOP_HEADER,
    LOOP_MEMBERS,
    SUCCESSORS_OF_LOOP_MEMBER
  };

  // Each "Setup..." method is like a constructor for a cycle state.
  // SetupSuccessors corresponds to Postorder(block, loop_header): it bails
  // out (kind_ = NONE) for already-visited blocks or blocks belonging to a
  // different loop, and otherwise starts iterating the block's successors.
  PostorderProcessor* SetupSuccessors(Zone* zone,
                                      HBasicBlock* block,
                                      HBasicBlock* loop_header,
                                      BitVector* visited) {
    if (block == NULL || visited->Contains(block->block_id()) ||
        block->parent_loop_header() != loop_header) {
      kind_ = NONE;
      block_ = NULL;
      loop_ = NULL;
      loop_header_ = NULL;
      return this;
    } else {
      block_ = block;
      loop_ = NULL;
      visited->Add(block->block_id());

      if (block->IsLoopHeader()) {
        kind_ = SUCCESSORS_OF_LOOP_HEADER;
        loop_header_ = block;
        InitializeSuccessors();
        PostorderProcessor* result = Push(zone);
        // Visit the loop members first; the successor cycle set up above
        // resumes when this child frame is popped.
        return result->SetupLoopMembers(zone, block, block->loop_information(),
                                        loop_header);
      } else {
        ASSERT(block->IsFinished());
        kind_ = SUCCESSORS;
        loop_header_ = loop_header;
        InitializeSuccessors();
        return this;
      }
    }
  }

  // Corresponds to VisitLoopMembers(block, loop_header).
  PostorderProcessor* SetupLoopMembers(Zone* zone,
                                       HBasicBlock* block,
                                       HLoopInformation* loop,
                                       HBasicBlock* loop_header) {
    kind_ = LOOP_MEMBERS;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeLoopMembers();
    return this;
  }

  // Corresponds to VisitSuccessorsOfLoopMember(block, loop_header).
  PostorderProcessor* SetupSuccessorsOfLoopMember(
      HBasicBlock* block,
      HLoopInformation* loop,
      HBasicBlock* loop_header) {
    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
    block_ = block;
    loop_ = loop;
    loop_header_ = loop_header;
    InitializeSuccessors();
    return this;
  }

  // This method "allocates" a new stack frame.
  PostorderProcessor* Push(Zone* zone) {
    // Reuse the child frame if one was allocated on an earlier push.
    if (child_ == NULL) {
      child_ = new(zone) PostorderProcessor(this);
    }
    return child_;
  }

  // Appends block_ to the post-order result list; the asserts verify the
  // post-order property (all non-loop-header successors already emitted).
  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
    ASSERT(block_->end()->FirstSuccessor() == NULL ||
           order->Contains(block_->end()->FirstSuccessor()) ||
           block_->end()->FirstSuccessor()->IsLoopHeader());
    ASSERT(block_->end()->SecondSuccessor() == NULL ||
           order->Contains(block_->end()->SecondSuccessor()) ||
           block_->end()->SecondSuccessor()->IsLoopHeader());
    order->Add(block_, zone);
  }

  // This method is the basic block to walk up the stack.
  PostorderProcessor* Pop(Zone* zone,
                          BitVector* visited,
                          ZoneList<HBasicBlock*>* order) {
    switch (kind_) {
      case SUCCESSORS:
      case SUCCESSORS_OF_LOOP_HEADER:
        // Finishing these cycles emits the block ("put block in result").
        ClosePostorder(order, zone);
        return father_;
      case LOOP_MEMBERS:
        return father_;
      case SUCCESSORS_OF_LOOP_MEMBER:
        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
          // In this case we need to perform a LOOP_MEMBERS cycle so we
          // initialize it and return this instead of father.
          return SetupLoopMembers(zone, block(),
                                  block()->loop_information(), loop_header_);
        } else {
          return father_;
        }
      case NONE:
        return father_;
    }
    UNREACHABLE();
    return NULL;
  }

  // Walks up the stack.
  PostorderProcessor* Backtrack(Zone* zone,
                                BitVector* visited,
                                ZoneList<HBasicBlock*>* order) {
    PostorderProcessor* parent = Pop(zone, visited, order);
    while (parent != NULL) {
      PostorderProcessor* next =
          parent->PerformNonBacktrackingStep(zone, visited, order);
      if (next != NULL) {
        return next;
      } else {
        parent = parent->Pop(zone, visited, order);
      }
    }
    return NULL;
  }

  // Advances the current cycle by one iteration and, when there is a next
  // block to descend into, pushes and returns the child frame for it.
  // Returns NULL when the current cycle is exhausted.
  PostorderProcessor* PerformNonBacktrackingStep(
      Zone* zone,
      BitVector* visited,
      ZoneList<HBasicBlock*>* order) {
    HBasicBlock* next_block;
    switch (kind_) {
      case SUCCESSORS:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block,
                                         loop_header_, visited);
        }
        break;
      case SUCCESSORS_OF_LOOP_HEADER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          // The header itself becomes the loop header for its successors.
          return result->SetupSuccessors(zone, next_block,
                                         block(), visited);
        }
        break;
      case LOOP_MEMBERS:
        next_block = AdvanceLoopMembers();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessorsOfLoopMember(next_block,
                                                     loop_, loop_header_);
        }
        break;
      case SUCCESSORS_OF_LOOP_MEMBER:
        next_block = AdvanceSuccessors();
        if (next_block != NULL) {
          PostorderProcessor* result = Push(zone);
          return result->SetupSuccessors(zone, next_block,
                                         loop_header_, visited);
        }
        break;
      case NONE:
        return NULL;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in successors" cycle.
  void InitializeSuccessors() {
    loop_index = 0;
    loop_length = 0;
    successor_iterator = HSuccessorIterator(block_->end());
  }

  HBasicBlock* AdvanceSuccessors() {
    if (!successor_iterator.Done()) {
      HBasicBlock* result = successor_iterator.Current();
      successor_iterator.Advance();
      return result;
    }
    return NULL;
  }

  // The following two methods implement a "foreach b in loop members" cycle.
  void InitializeLoopMembers() {
    loop_index = 0;
    loop_length = loop_->blocks()->length();
  }

  HBasicBlock* AdvanceLoopMembers() {
    if (loop_index < loop_length) {
      HBasicBlock* result = loop_->blocks()->at(loop_index);
      loop_index++;
      return result;
    } else {
      return NULL;
    }
  }

  LoopKind kind_;                   // Which cycle this frame is running.
  PostorderProcessor* father_;      // Frame below on the simulated stack.
  PostorderProcessor* child_;       // Cached frame above (recycled by Push).
  HLoopInformation* loop_;
  HBasicBlock* block_;
  HBasicBlock* loop_header_;
  int loop_index;                   // Cursor into loop_->blocks().
  int loop_length;
  HSuccessorIterator successor_iterator;
};
3242
3243
// Computes the block ordering with the iterative PostorderProcessor above,
// then rewrites blocks_ with the post-order result reversed and renumbers
// the block ids to match their new positions.
void HGraph::OrderBlocks() {
  CompilationPhase phase("H_Block ordering", info());
  BitVector visited(blocks_.length(), zone());

  ZoneList<HBasicBlock*> reverse_result(8, zone());
  HBasicBlock* start = blocks_[0];
  PostorderProcessor* postorder =
      PostorderProcessor::CreateEntryProcessor(zone(), start, &visited);
  while (postorder != NULL) {
    postorder = postorder->PerformStep(zone(), &visited, &reverse_result);
  }
  // reverse_result holds the post-order; emitting it backwards yields the
  // final ordering.
  blocks_.Rewind(0);
  int index = 0;
  for (int i = reverse_result.length() - 1; i >= 0; --i) {
    HBasicBlock* b = reverse_result[i];
    blocks_.Add(b, zone());
    b->set_block_id(index++);
  }
}
3263
3264
3265void HGraph::AssignDominators() {
3266  HPhase phase("H_Assign dominators", this);
3267  for (int i = 0; i < blocks_.length(); ++i) {
3268    HBasicBlock* block = blocks_[i];
3269    if (block->IsLoopHeader()) {
3270      // Only the first predecessor of a loop header is from outside the loop.
3271      // All others are back edges, and thus cannot dominate the loop header.
3272      block->AssignCommonDominator(block->predecessors()->first());
3273      block->AssignLoopSuccessorDominators();
3274    } else {
3275      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3276        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
3277      }
3278    }
3279  }
3280}
3281
3282
3283bool HGraph::CheckArgumentsPhiUses() {
3284  int block_count = blocks_.length();
3285  for (int i = 0; i < block_count; ++i) {
3286    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3287      HPhi* phi = blocks_[i]->phis()->at(j);
3288      // We don't support phi uses of arguments for now.
3289      if (phi->CheckFlag(HValue::kIsArguments)) return false;
3290    }
3291  }
3292  return true;
3293}
3294
3295
3296bool HGraph::CheckConstPhiUses() {
3297  int block_count = blocks_.length();
3298  for (int i = 0; i < block_count; ++i) {
3299    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3300      HPhi* phi = blocks_[i]->phis()->at(j);
3301      // Check for the hole value (from an uninitialized const).
3302      for (int k = 0; k < phi->OperandCount(); k++) {
3303        if (phi->OperandAt(k) == GetConstantHole()) return false;
3304      }
3305    }
3306  }
3307  return true;
3308}
3309
3310
3311void HGraph::CollectPhis() {
3312  int block_count = blocks_.length();
3313  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3314  for (int i = 0; i < block_count; ++i) {
3315    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3316      HPhi* phi = blocks_[i]->phis()->at(j);
3317      phi_list_->Add(phi, zone());
3318    }
3319  }
3320}
3321
3322
3323// Implementation of utility class to encapsulate the translation state for
3324// a (possibly inlined) function.
3325FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
3326                             CompilationInfo* info,
3327                             InliningKind inlining_kind)
3328    : owner_(owner),
3329      compilation_info_(info),
3330      call_context_(NULL),
3331      inlining_kind_(inlining_kind),
3332      function_return_(NULL),
3333      test_context_(NULL),
3334      entry_(NULL),
3335      arguments_object_(NULL),
3336      arguments_elements_(NULL),
3337      outer_(owner->function_state()) {
3338  if (outer_ != NULL) {
3339    // State for an inline function.
3340    if (owner->ast_context()->IsTest()) {
3341      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
3342      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
3343      if_true->MarkAsInlineReturnTarget(owner->current_block());
3344      if_false->MarkAsInlineReturnTarget(owner->current_block());
3345      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
3346      Expression* cond = outer_test_context->condition();
3347      // The AstContext constructor pushed on the context stack.  This newed
3348      // instance is the reason that AstContext can't be BASE_EMBEDDED.
3349      test_context_ = new TestContext(owner, cond, if_true, if_false);
3350    } else {
3351      function_return_ = owner->graph()->CreateBasicBlock();
3352      function_return()->MarkAsInlineReturnTarget(owner->current_block());
3353    }
3354    // Set this after possibly allocating a new TestContext above.
3355    call_context_ = owner->ast_context();
3356  }
3357
3358  // Push on the state stack.
3359  owner->set_function_state(this);
3360}
3361
3362
FunctionState::~FunctionState() {
  // The TestContext (if any) was heap-allocated in the constructor.
  delete test_context_;
  // Pop this state off the builder's function-state stack.
  owner_->set_function_state(outer_);
}
3367
3368
// Implementation of utility classes to represent an expression's context in
// the AST.
// The constructor pushes this context on the builder's context stack; the
// destructor pops it, giving RAII scoping for expression visits.
AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
    : owner_(owner),
      kind_(kind),
      outer_(owner->ast_context()),
      for_typeof_(false) {
  owner->set_ast_context(this);  // Push.
#ifdef DEBUG
  // Remember the environment length so subclass destructors can assert
  // the expected net stack effect (zero or exactly one pushed value).
  ASSERT(owner->environment()->frame_type() == JS_FUNCTION);
  original_length_ = owner->environment()->length();
#endif
}
3382
3383
AstContext::~AstContext() {
  // Restore the surrounding expression context.
  owner_->set_ast_context(outer_);  // Pop.
}
3387
3388
EffectContext::~EffectContext() {
  // An effect context must leave the environment stack unchanged, unless
  // we bailed out or the current block ended (unreachable code).
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}
3395
3396
ValueContext::~ValueContext() {
  // A value context must push exactly one value on the environment stack,
  // unless we bailed out or the current block ended (unreachable code).
  ASSERT(owner()->HasStackOverflow() ||
         owner()->current_block() == NULL ||
         (owner()->environment()->length() == original_length_ + 1 &&
          owner()->environment()->frame_type() == JS_FUNCTION));
}
3403
3404
// Receive an expression's value in an effect context.
void EffectContext::ReturnValue(HValue* value) {
  // The value is simply ignored.
}
3408
3409
void ValueContext::ReturnValue(HValue* value) {
  // The value is tracked in the bailout environment, and communicated
  // through the environment as the result of the expression.
  if (!arguments_allowed() && value->CheckFlag(HValue::kIsArguments)) {
    // Bailout only raises the stack-overflow flag (see Bailout below); we
    // still push so the environment stays consistent while unwinding.
    owner()->Bailout(kBadValueContextForArgumentsValue);
  }
  owner()->Push(value);
}
3418
3419
// In a test context the value is consumed by branching on it instead of
// being pushed on the environment stack.
void TestContext::ReturnValue(HValue* value) {
  BuildBranch(value);
}
3423
3424
void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  // Emit the instruction for its side effects only; its value is not kept.
  owner()->AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
3432
3433
void EffectContext::ReturnControl(HControlInstruction* instr,
                                  BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  // The branch outcome is irrelevant in an effect context: route both
  // successors through empty blocks and immediately rejoin.
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
  owner()->set_current_block(join);
}
3445
3446
void EffectContext::ReturnContinuation(HIfContinuation* continuation,
                                       BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  // Join only the branches that are reachable; when just one is, simply
  // continue building in that block.
  if (!continuation->IsTrueReachable()) {
    owner()->set_current_block(false_branch);
  } else if (!continuation->IsFalseReachable()) {
    owner()->set_current_block(true_branch);
  } else {
    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
    owner()->set_current_block(join);
  }
}
3461
3462
void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  // Emit the instruction and leave its value on the environment stack.
  owner()->AddInstruction(instr);
  owner()->Push(instr);
  if (instr->HasObservableSideEffects()) {
    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
}
3474
3475
void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
  }
  // Materialize the branch outcome as a boolean: push the true constant on
  // the true edge and the false constant on the false edge, then join with
  // that value on top of the environment stack.
  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, materialize_true);
  instr->SetSuccessorAt(1, materialize_false);
  owner()->FinishCurrentBlock(instr);
  owner()->set_current_block(materialize_true);
  owner()->Push(owner()->graph()->GetConstantTrue());
  owner()->set_current_block(materialize_false);
  owner()->Push(owner()->graph()->GetConstantFalse());
  HBasicBlock* join =
    owner()->CreateJoin(materialize_true, materialize_false, ast_id);
  owner()->set_current_block(join);
}
3494
3495
// Materialize the continuation's outcome as a boolean value pushed in each
// reachable branch, joining afterwards when both branches are live.
void ValueContext::ReturnContinuation(HIfContinuation* continuation,
                                      BailoutId ast_id) {
  HBasicBlock* materialize_true = NULL;
  HBasicBlock* materialize_false = NULL;
  continuation->Continue(&materialize_true, &materialize_false);
  if (continuation->IsTrueReachable()) {
    owner()->set_current_block(materialize_true);
    owner()->Push(owner()->graph()->GetConstantTrue());
    // NOTE(review): the second set_current_block to the same block looks
    // redundant -- confirm whether Push can change the current block.
    owner()->set_current_block(materialize_true);
  }
  if (continuation->IsFalseReachable()) {
    owner()->set_current_block(materialize_false);
    owner()->Push(owner()->graph()->GetConstantFalse());
    owner()->set_current_block(materialize_false);
  }
  if (continuation->TrueAndFalseReachable()) {
    HBasicBlock* join =
        owner()->CreateJoin(materialize_true, materialize_false, ast_id);
    owner()->set_current_block(join);
  }
}
3517
3518
void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->IsControlInstruction());
  HOptimizedGraphBuilder* builder = owner();
  builder->AddInstruction(instr);
  // We expect a simulate after every expression with side effects, though
  // this one isn't actually needed (and wouldn't work if it were targeted).
  if (instr->HasObservableSideEffects()) {
    // Temporarily push the value so the simulate records it, then pop.
    builder->Push(instr);
    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
    builder->Pop();
  }
  // Consume the value by branching on it.
  BuildBranch(instr);
}
3532
3533
void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
  ASSERT(!instr->HasObservableSideEffects());
  // Keep the graph in edge-split form: route each outgoing branch edge
  // through an empty block before jumping to the true/false targets.
  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
  instr->SetSuccessorAt(0, empty_true);
  instr->SetSuccessorAt(1, empty_false);
  owner()->FinishCurrentBlock(instr);
  owner()->Goto(empty_true, if_true(), owner()->function_state());
  owner()->Goto(empty_false, if_false(), owner()->function_state());
  // Control has been transferred to the branch targets; nothing falls
  // through here.
  owner()->set_current_block(NULL);
}
3545
3546
void TestContext::ReturnContinuation(HIfContinuation* continuation,
                                     BailoutId ast_id) {
  HBasicBlock* true_branch = NULL;
  HBasicBlock* false_branch = NULL;
  continuation->Continue(&true_branch, &false_branch);
  // Forward each reachable branch to the corresponding test target.
  if (continuation->IsTrueReachable()) {
    owner()->Goto(true_branch, if_true(), owner()->function_state());
  }
  if (continuation->IsFalseReachable()) {
    owner()->Goto(false_branch, if_false(), owner()->function_state());
  }
  // Control has been transferred; nothing falls through here.
  owner()->set_current_block(NULL);
}
3560
3561
void TestContext::BuildBranch(HValue* value) {
  // We expect the graph to be in edge-split form: there is no edge that
  // connects a branch node to a join node.  We conservatively ensure that
  // property by always adding an empty block on the outgoing edges of this
  // branch.
  HOptimizedGraphBuilder* builder = owner();
  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
    // Bailout only raises the stack-overflow flag; the branch below is
    // still emitted so graph construction stays consistent while unwinding.
    builder->Bailout(kArgumentsObjectValueInATestContext);
  }
  ToBooleanStub::Types expected(condition()->to_boolean_types());
  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
}
3574
3575
// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.

// Perform |call| and return from the enclosing visitor if it bailed out
// (signalled via the stack-overflow flag).
#define CHECK_BAILOUT(call)                     \
  do {                                          \
    call;                                       \
    if (HasStackOverflow()) return;             \
  } while (false)


// Like CHECK_BAILOUT, but also return when the current block has ended,
// i.e. the code being visited turned out to be unreachable.
#define CHECK_ALIVE(call)                                       \
  do {                                                          \
    call;                                                       \
    if (HasStackOverflow() || current_block() == NULL) return;  \
  } while (false)


// Like CHECK_ALIVE, for callers that must return |value| on failure.
#define CHECK_ALIVE_OR_RETURN(call, value)                            \
  do {                                                                \
    call;                                                             \
    if (HasStackOverflow() || current_block() == NULL) return value;  \
  } while (false)
3596
3597
// Record why optimized compilation failed and abort graph building by
// raising the stack-overflow flag that CHECK_BAILOUT/CHECK_ALIVE test.
void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
  current_info()->set_bailout_reason(reason);
  SetStackOverflow();
}
3602
3603
// Visit |expr| in a context where its value is discarded.
void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
  EffectContext for_effect(this);
  Visit(expr);
}
3608
3609
// Visit |expr| so that its value is left on the environment stack.
void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
                                           ArgumentsAllowedFlag flag) {
  ValueContext for_value(this, flag);
  Visit(expr);
}
3615
3616
// Like VisitForValue, but flags the context as the operand of a typeof.
void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  for_value.set_for_typeof(true);
  Visit(expr);
}
3622
3623
3624
// Visit |expr| as a branch condition feeding the given target blocks.
void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
                                             HBasicBlock* true_block,
                                             HBasicBlock* false_block) {
  TestContext for_test(this, expr, true_block, false_block);
  Visit(expr);
}
3631
3632
// Evaluate |expr| and emit it as a pushed call argument.
void HOptimizedGraphBuilder::VisitArgument(Expression* expr) {
  CHECK_ALIVE(VisitForValue(expr));
  Push(Add<HPushArgument>(Pop()));
}
3637
3638
3639void HOptimizedGraphBuilder::VisitArgumentList(
3640    ZoneList<Expression*>* arguments) {
3641  for (int i = 0; i < arguments->length(); i++) {
3642    CHECK_ALIVE(VisitArgument(arguments->at(i)));
3643  }
3644}
3645
3646
3647void HOptimizedGraphBuilder::VisitExpressions(
3648    ZoneList<Expression*>* exprs) {
3649  for (int i = 0; i < exprs->length(); ++i) {
3650    CHECK_ALIVE(VisitForValue(exprs->at(i)));
3651  }
3652}
3653
3654
// Translate the function body into the initial hydrogen graph.  Returns
// false (after recording a bailout reason) if the function cannot be
// optimized.
bool HOptimizedGraphBuilder::BuildGraph() {
  // Constructs the optimizer does not support.
  if (current_info()->function()->is_generator()) {
    Bailout(kFunctionIsAGenerator);
    return false;
  }
  Scope* scope = current_info()->scope();
  if (scope->HasIllegalRedeclaration()) {
    Bailout(kFunctionWithIllegalRedeclaration);
    return false;
  }
  if (scope->calls_eval()) {
    Bailout(kFunctionCallsEval);
    return false;
  }
  SetUpScope(scope);

  // Add an edge to the body entry.  This is warty: the graph's start
  // environment will be used by the Lithium translation as the initial
  // environment on graph entry, but it has now been mutated by the
  // Hydrogen translation of the instructions in the start block.  This
  // environment uses values which have not been defined yet.  These
  // Hydrogen instructions will then be replayed by the Lithium
  // translation, so they cannot have an environment effect.  The edge to
  // the body's entry block (along with some special logic for the start
  // block in HInstruction::InsertAfter) seals the start block from
  // getting unwanted instructions inserted.
  //
  // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
  // Make the Hydrogen instructions in the initial block into Hydrogen
  // values (but not instructions), present in the initial environment and
  // not replayed by the Lithium translation.
  HEnvironment* initial_env = environment()->CopyWithoutHistory();
  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
  Goto(body_entry);
  body_entry->SetJoinId(BailoutId::FunctionEntry());
  set_current_block(body_entry);

  // Handle implicit declaration of the function name in named function
  // expressions before other declarations.
  if (scope->is_function_scope() && scope->function() != NULL) {
    VisitVariableDeclaration(scope->function());
  }
  VisitDeclarations(scope->declarations());
  Add<HSimulate>(BailoutId::Declarations());

  // Emit the function-entry stack check.
  Add<HStackCheck>(HStackCheck::kFunctionEntry);

  VisitStatements(current_info()->function()->body());
  if (HasStackOverflow()) return false;

  // Add an implicit return of undefined if control can fall off the end.
  if (current_block() != NULL) {
    Add<HReturn>(graph()->GetConstantUndefined());
    set_current_block(NULL);
  }

  // If the checksum of the number of type info changes is the same as the
  // last time this function was compiled, then this recompile is likely not
  // due to missing/inadequate type feedback, but rather too aggressive
  // optimization. Disable optimistic LICM in that case.
  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  int checksum = type_info->own_type_change_checksum();
  int composite_checksum = graph()->update_type_change_checksum(checksum);
  graph()->set_use_optimistic_licm(
      !type_info->matches_inlined_type_change_checksum(composite_checksum));
  type_info->set_inlined_type_change_checksum(composite_checksum);

  // Perform any necessary OSR-specific cleanups or changes to the graph.
  osr()->FinishGraph();

  return true;
}
3729
3730
// Run the hydrogen optimization pipeline over the graph.  Returns false and
// sets |bailout_reason| when an unsupported construct is detected.  The
// phase ordering below is load-bearing; see the inline comments.
bool HGraph::Optimize(BailoutReason* bailout_reason) {
  OrderBlocks();
  AssignDominators();

  // We need to create a HConstant "zero" now so that GVN will fold every
  // zero-valued constant in the graph together.
  // The constant is needed to make idef-based bounds check work: the pass
  // evaluates relations with "zero" and that zero cannot be created after GVN.
  GetConstant0();

#ifdef DEBUG
  // Do a full verify after building the graph and computing dominators.
  Verify(true);
#endif

  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
    Run<HEnvironmentLivenessAnalysisPhase>();
  }

  // Bail out on phi uses of the hole (uninitialized consts)...
  if (!CheckConstPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
    return false;
  }
  Run<HRedundantPhiEliminationPhase>();
  // ...and on phi uses of the arguments object that survive phi elimination.
  if (!CheckArgumentsPhiUses()) {
    *bailout_reason = kUnsupportedPhiUseOfArguments;
    return false;
  }

  // Find and mark unreachable code to simplify optimizations, especially gvn,
  // where unreachable code could unnecessarily defeat LICM.
  Run<HMarkUnreachableBlocksPhase>();

  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();

  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();

  CollectPhis();

  if (has_osr()) osr()->FinishOsrValues();

  Run<HInferRepresentationPhase>();

  // Remove HSimulate instructions that have turned out not to be needed
  // after all by folding them into the following HSimulate.
  // This must happen after inferring representations.
  Run<HMergeRemovableSimulatesPhase>();

  Run<HMarkDeoptimizeOnUndefinedPhase>();
  Run<HRepresentationChangesPhase>();

  Run<HInferTypesPhase>();

  // Must be performed before canonicalization to ensure that Canonicalize
  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
  // zero.
  if (FLAG_opt_safe_uint32_operations) Run<HUint32AnalysisPhase>();

  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();

  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();

  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();

  if (FLAG_use_range) Run<HRangeAnalysisPhase>();

  Run<HComputeChangeUndefinedToNaN>();
  Run<HComputeMinusZeroChecksPhase>();

  // Eliminate redundant stack checks on backwards branches.
  Run<HStackCheckEliminationPhase>();

  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
  if (FLAG_array_bounds_checks_hoisting) Run<HBoundsCheckHoistingPhase>();
  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();

  RestoreActualValues();

  // Find unreachable code a second time, GVN and other optimizations may have
  // made blocks unreachable that were previously reachable.
  Run<HMarkUnreachableBlocksPhase>();

  return true;
}
3817
3818
// Rewire uses of redefining instructions back to the underlying actual
// values; purely informative definitions are deleted entirely.
void HGraph::RestoreActualValues() {
  HPhase phase("H_Restore actual values", this);

  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
    HBasicBlock* block = blocks()->at(block_index);

#ifdef DEBUG
    // Phis are never redefinitions: each phi must be its own actual value.
    for (int i = 0; i < block->phis()->length(); i++) {
      HPhi* phi = block->phis()->at(i);
      ASSERT(phi->ActualValue() == phi);
    }
#endif

    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
      HInstruction* instruction = it.Current();
      if (instruction->ActualValue() != instruction) {
        ASSERT(instruction->IsInformativeDefinition());
        if (instruction->IsPurelyInformativeDefinition()) {
          // No side effects: remove it and forward to the redefined operand.
          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
        } else {
          // Keep the instruction but point all uses at the actual value.
          instruction->ReplaceAllUsesWith(instruction->ActualValue());
        }
      }
    }
  }
}
3845
3846
3847template <class Instruction>
3848HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
3849  int count = call->argument_count();
3850  ZoneList<HValue*> arguments(count, zone());
3851  for (int i = 0; i < count; ++i) {
3852    arguments.Add(Pop(), zone());
3853  }
3854
3855  while (!arguments.is_empty()) {
3856    Add<HPushArgument>(arguments.RemoveLast());
3857  }
3858  return call;
3859}
3860
3861
// Set up the graph's start environment: bind the context, bind all
// parameters (the receiver has parameter index 0), initialize the
// remaining environment slots to undefined, and create the simulated
// arguments object.
void HOptimizedGraphBuilder::SetUpScope(Scope* scope) {
  // First special is HContext.
  HInstruction* context = Add<HContext>();
  environment()->BindContext(context);

  // Create an arguments object containing the initial parameters.  Set the
  // initial values of parameters including "this" having parameter index 0.
  ASSERT_EQ(scope->num_parameters() + 1, environment()->parameter_count());
  HArgumentsObject* arguments_object =
      New<HArgumentsObject>(environment()->parameter_count());
  for (int i = 0; i < environment()->parameter_count(); ++i) {
    HInstruction* parameter = Add<HParameter>(i);
    arguments_object->AddArgument(parameter, zone());
    environment()->Bind(i, parameter);
  }
  AddInstruction(arguments_object);
  graph()->SetArgumentsObject(arguments_object);

  HConstant* undefined_constant = graph()->GetConstantUndefined();
  // Initialize specials and locals to undefined.
  for (int i = environment()->parameter_count() + 1;
       i < environment()->length();
       ++i) {
    environment()->Bind(i, undefined_constant);
  }

  // Handle the arguments and arguments shadow variables specially (they do
  // not have declarations).
  if (scope->arguments() != NULL) {
    if (!scope->arguments()->IsStackAllocated()) {
      // Context-allocated arguments are not supported by the optimizer.
      return Bailout(kContextAllocatedArguments);
    }

    environment()->Bind(scope->arguments(),
                        graph()->GetArgumentsObject());
  }
}
3899
3900
3901void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
3902  for (int i = 0; i < statements->length(); i++) {
3903    Statement* stmt = statements->at(i);
3904    CHECK_ALIVE(Visit(stmt));
3905    if (stmt->IsJump()) break;
3906  }
3907}
3908
3909
void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Blocks with their own scope (let declarations) are not supported.
  if (stmt->scope() != NULL) {
    return Bailout(kScopedBlock);
  }
  // Track break targets while visiting the block's statements.
  BreakAndContinueInfo break_info(stmt);
  { BreakAndContinueScope push(&break_info, this);
    CHECK_BAILOUT(VisitStatements(stmt->statements()));
  }
  // If any statement broke out of this block, join fall-through control
  // flow with the break block and continue there.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block != NULL) {
    if (current_block() != NULL) Goto(break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
3928
3929
// An expression statement is evaluated purely for its side effects.
void HOptimizedGraphBuilder::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  VisitForEffect(stmt->expression());
}
3937
3938
// An empty statement generates no code.
void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
}
3944
3945
void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  // Constant-fold statically-known conditions: emit only the taken arm.
  if (stmt->condition()->ToBooleanIsTrue()) {
    Add<HSimulate>(stmt->ThenId());
    Visit(stmt->then_statement());
  } else if (stmt->condition()->ToBooleanIsFalse()) {
    Add<HSimulate>(stmt->ElseId());
    Visit(stmt->else_statement());
  } else {
    // General case: branch on the condition, build both arms, and join.
    HBasicBlock* cond_true = graph()->CreateBasicBlock();
    HBasicBlock* cond_false = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));

    // An arm whose entry block got no predecessor is unreachable; pass
    // NULL to CreateJoin for that side.
    if (cond_true->HasPredecessor()) {
      cond_true->SetJoinId(stmt->ThenId());
      set_current_block(cond_true);
      CHECK_BAILOUT(Visit(stmt->then_statement()));
      cond_true = current_block();
    } else {
      cond_true = NULL;
    }

    if (cond_false->HasPredecessor()) {
      cond_false->SetJoinId(stmt->ElseId());
      set_current_block(cond_false);
      CHECK_BAILOUT(Visit(stmt->else_statement()));
      cond_false = current_block();
    } else {
      cond_false = NULL;
    }

    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
    set_current_block(join);
  }
}
3983
3984
3985HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
3986    BreakableStatement* stmt,
3987    BreakType type,
3988    int* drop_extra) {
3989  *drop_extra = 0;
3990  BreakAndContinueScope* current = this;
3991  while (current != NULL && current->info()->target() != stmt) {
3992    *drop_extra += current->info()->drop_extra();
3993    current = current->next();
3994  }
3995  ASSERT(current != NULL);  // Always found (unless stack is malformed).
3996
3997  if (type == BREAK) {
3998    *drop_extra += current->info()->drop_extra();
3999  }
4000
4001  HBasicBlock* block = NULL;
4002  switch (type) {
4003    case BREAK:
4004      block = current->info()->break_block();
4005      if (block == NULL) {
4006        block = current->owner()->graph()->CreateBasicBlock();
4007        current->info()->set_break_block(block);
4008      }
4009      break;
4010
4011    case CONTINUE:
4012      block = current->info()->continue_block();
4013      if (block == NULL) {
4014        block = current->owner()->graph()->CreateBasicBlock();
4015        current->info()->set_continue_block(block);
4016      }
4017      break;
4018  }
4019
4020  return block;
4021}
4022
4023
4024void HOptimizedGraphBuilder::VisitContinueStatement(
4025    ContinueStatement* stmt) {
4026  ASSERT(!HasStackOverflow());
4027  ASSERT(current_block() != NULL);
4028  ASSERT(current_block()->HasPredecessor());
4029  int drop_extra = 0;
4030  HBasicBlock* continue_block = break_scope()->Get(
4031      stmt->target(), BreakAndContinueScope::CONTINUE, &drop_extra);
4032  Drop(drop_extra);
4033  Goto(continue_block);
4034  set_current_block(NULL);
4035}
4036
4037
4038void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4039  ASSERT(!HasStackOverflow());
4040  ASSERT(current_block() != NULL);
4041  ASSERT(current_block()->HasPredecessor());
4042  int drop_extra = 0;
4043  HBasicBlock* break_block = break_scope()->Get(
4044      stmt->target(), BreakAndContinueScope::BREAK, &drop_extra);
4045  Drop(drop_extra);
4046  Goto(break_block);
4047  set_current_block(NULL);
4048}
4049
4050
// Build a return.  The shape depends on whether this is a real return or
// a return from an inlined function, and on the kind of inlining.
void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  FunctionState* state = function_state();
  AstContext* context = call_context();
  if (context == NULL) {
    // Not an inlined return, so an actual one.
    CHECK_ALIVE(VisitForValue(stmt->expression()));
    HValue* result = environment()->Pop();
    Add<HReturn>(result);
  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
    // Return from an inlined construct call. In a test context the return value
    // will always evaluate to true, in a value context the return value needs
    // to be a JSObject.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(test->if_true(), state);
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      HValue* return_value = Pop();
      // The receiver (the newly allocated object) is argument 0.
      HValue* receiver = environment()->arguments_environment()->Lookup(0);
      // Per construct semantics: if the returned value is a spec object it
      // becomes the result, otherwise the receiver does.
      HHasInstanceTypeAndBranch* typecheck =
          New<HHasInstanceTypeAndBranch>(return_value,
                                         FIRST_SPEC_OBJECT_TYPE,
                                         LAST_SPEC_OBJECT_TYPE);
      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
      typecheck->SetSuccessorAt(0, if_spec_object);
      typecheck->SetSuccessorAt(1, not_spec_object);
      FinishCurrentBlock(typecheck);
      AddLeaveInlined(if_spec_object, return_value, state);
      AddLeaveInlined(not_spec_object, receiver, state);
    }
  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
    // Return from an inlined setter call. The returned value is never used, the
    // value of an assignment is always the value of the RHS of the assignment.
    CHECK_ALIVE(VisitForEffect(stmt->expression()));
    if (context->IsTest()) {
      // The RHS of the assignment is argument 1 of the setter call.
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      context->ReturnValue(rhs);
    } else if (context->IsEffect()) {
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      HValue* rhs = environment()->arguments_environment()->Lookup(1);
      AddLeaveInlined(rhs, state);
    }
  } else {
    // Return from a normal inlined function. Visit the subexpression in the
    // expression context of the call.
    if (context->IsTest()) {
      TestContext* test = TestContext::cast(context);
      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
    } else if (context->IsEffect()) {
      CHECK_ALIVE(VisitForEffect(stmt->expression()));
      Goto(function_return(), state);
    } else {
      ASSERT(context->IsValue());
      CHECK_ALIVE(VisitForValue(stmt->expression()));
      AddLeaveInlined(Pop(), state);
    }
  }
  // Control never falls through a return.
  set_current_block(NULL);
}
4121
4122
// 'with' statements are not supported by the optimizing compiler.
void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  return Bailout(kWithStatement);
}
4129
4130
// Builds the graph for a switch statement.  Only switches whose labels are
// all smi literals or all string literals (and with a bounded clause count)
// are optimized; generic/mixed switches bail out to full codegen.  Code is
// generated in two passes: first a chain of compare-and-branch tests with
// dangling true branches, then the clause bodies, threading fall-through
// edges between consecutive bodies.
void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  // We only optimize switch statements with smi-literal smi comparisons,
  // with a bounded number of clauses.
  const int kCaseClauseLimit = 128;
  ZoneList<CaseClause*>* clauses = stmt->cases();
  int clause_count = clauses->length();
  if (clause_count > kCaseClauseLimit) {
    return Bailout(kSwitchStatementTooManyClauses);
  }

  ASSERT(stmt->switch_type() != SwitchStatement::UNKNOWN_SWITCH);
  if (stmt->switch_type() == SwitchStatement::GENERIC_SWITCH) {
    return Bailout(kSwitchStatementMixedOrNonLiteralSwitchLabels);
  }

  CHECK_ALIVE(VisitForValue(stmt->tag()));
  Add<HSimulate>(stmt->EntryId());
  HValue* tag_value = Pop();
  HBasicBlock* first_test_block = current_block();

  HUnaryControlInstruction* string_check = NULL;
  HBasicBlock* not_string_block = NULL;

  // Test switch's tag value if all clauses are string literals
  if (stmt->switch_type() == SwitchStatement::STRING_SWITCH) {
    first_test_block = graph()->CreateBasicBlock();
    not_string_block = graph()->CreateBasicBlock();
    string_check = New<HIsStringAndBranch>(
        tag_value, first_test_block, not_string_block);
    FinishCurrentBlock(string_check);

    set_current_block(first_test_block);
  }

  // 1. Build all the tests, with dangling true branches
  BailoutId default_id = BailoutId::None();
  for (int i = 0; i < clause_count; ++i) {
    CaseClause* clause = clauses->at(i);
    if (clause->is_default()) {
      // Remember the default clause's entry id; no test is emitted for it.
      default_id = clause->EntryId();
      continue;
    }

    // Generate a compare and branch.
    CHECK_ALIVE(VisitForValue(clause->label()));
    HValue* label_value = Pop();

    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
    HBasicBlock* body_block = graph()->CreateBasicBlock();

    HControlInstruction* compare;

    if (stmt->switch_type() == SwitchStatement::SMI_SWITCH) {
      if (!clause->compare_type()->Is(Type::Smi())) {
        // Type feedback says the clause compare was not always a smi;
        // soft-deopt so the test below can assume smi inputs.
        Add<HDeoptimize>("Non-smi switch type", Deoptimizer::SOFT);
      }

      HCompareNumericAndBranch* compare_ =
          New<HCompareNumericAndBranch>(tag_value,
                                        label_value,
                                        Token::EQ_STRICT);
      compare_->set_observed_input_representation(
          Representation::Smi(), Representation::Smi());
      compare = compare_;
    } else {
      compare = New<HStringCompareAndBranch>(tag_value,
                                             label_value,
                                             Token::EQ_STRICT);
    }

    compare->SetSuccessorAt(0, body_block);
    compare->SetSuccessorAt(1, next_test_block);
    FinishCurrentBlock(compare);

    set_current_block(next_test_block);
  }

  // Save the current block to use for the default or to join with the
  // exit.
  HBasicBlock* last_block = current_block();

  if (not_string_block != NULL) {
    // A non-string tag skips all string tests and lands on the default
    // clause (if any) or the switch exit.
    BailoutId join_id = !default_id.IsNone() ? default_id : stmt->ExitId();
    last_block = CreateJoin(last_block, not_string_block, join_id);
  }

  // 2. Loop over the clauses and the linked list of tests in lockstep,
  // translating the clause bodies.
  HBasicBlock* curr_test_block = first_test_block;
  HBasicBlock* fall_through_block = NULL;

  BreakAndContinueInfo break_info(stmt);
  { BreakAndContinueScope push(&break_info, this);
    for (int i = 0; i < clause_count; ++i) {
      CaseClause* clause = clauses->at(i);

      // Identify the block where normal (non-fall-through) control flow
      // goes to.
      HBasicBlock* normal_block = NULL;
      if (clause->is_default()) {
        if (last_block != NULL) {
          normal_block = last_block;
          last_block = NULL;  // Cleared to indicate we've handled it.
        }
      } else {
        // If the current test block is deoptimizing due to an unhandled clause
        // of the switch, the test instruction is in the next block since the
        // deopt must end the current block.
        if (curr_test_block->IsDeoptimizing()) {
          ASSERT(curr_test_block->end()->SecondSuccessor() == NULL);
          curr_test_block = curr_test_block->end()->FirstSuccessor();
        }
        normal_block = curr_test_block->end()->FirstSuccessor();
        curr_test_block = curr_test_block->end()->SecondSuccessor();
      }

      // Identify a block to emit the body into.
      if (normal_block == NULL) {
        if (fall_through_block == NULL) {
          // (a) Unreachable.
          if (clause->is_default()) {
            continue;  // Might still be reachable clause bodies.
          } else {
            break;
          }
        } else {
          // (b) Reachable only as fall through.
          set_current_block(fall_through_block);
        }
      } else if (fall_through_block == NULL) {
        // (c) Reachable only normally.
        set_current_block(normal_block);
      } else {
        // (d) Reachable both ways.
        HBasicBlock* join = CreateJoin(fall_through_block,
                                       normal_block,
                                       clause->EntryId());
        set_current_block(join);
      }

      CHECK_BAILOUT(VisitStatements(clause->statements()));
      fall_through_block = current_block();
    }
  }

  // Create an up-to-3-way join.  Use the break block if it exists since
  // it's already a join block.
  HBasicBlock* break_block = break_info.break_block();
  if (break_block == NULL) {
    set_current_block(CreateJoin(fall_through_block,
                                 last_block,
                                 stmt->ExitId()));
  } else {
    if (fall_through_block != NULL) Goto(fall_through_block, break_block);
    if (last_block != NULL) Goto(last_block, break_block);
    break_block->SetJoinId(stmt->ExitId());
    set_current_block(break_block);
  }
}
4294
4295
// Emits the body of a loop: a simulate for the stack-check bailout point,
// the backwards-branch stack check itself (registered on the loop header so
// that later passes can find it), and then the body statements under a
// break/continue scope.
void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
                                           HBasicBlock* loop_entry,
                                           BreakAndContinueInfo* break_info) {
  BreakAndContinueScope push(break_info, this);
  Add<HSimulate>(stmt->StackCheckId());
  HStackCheck* stack_check =
      HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
  ASSERT(loop_entry->IsLoopHeader());
  loop_entry->loop_information()->set_stack_check(stack_check);
  CHECK_BAILOUT(Visit(stmt->body()));
}
4307
4308
4309void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4310  ASSERT(!HasStackOverflow());
4311  ASSERT(current_block() != NULL);
4312  ASSERT(current_block()->HasPredecessor());
4313  ASSERT(current_block() != NULL);
4314  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4315
4316  BreakAndContinueInfo break_info(stmt);
4317  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4318  HBasicBlock* body_exit =
4319      JoinContinue(stmt, current_block(), break_info.continue_block());
4320  HBasicBlock* loop_successor = NULL;
4321  if (body_exit != NULL && !stmt->cond()->ToBooleanIsTrue()) {
4322    set_current_block(body_exit);
4323    // The block for a true condition, the actual predecessor block of the
4324    // back edge.
4325    body_exit = graph()->CreateBasicBlock();
4326    loop_successor = graph()->CreateBasicBlock();
4327    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4328    if (body_exit->HasPredecessor()) {
4329      body_exit->SetJoinId(stmt->BackEdgeId());
4330    } else {
4331      body_exit = NULL;
4332    }
4333    if (loop_successor->HasPredecessor()) {
4334      loop_successor->SetJoinId(stmt->ExitId());
4335    } else {
4336      loop_successor = NULL;
4337    }
4338  }
4339  HBasicBlock* loop_exit = CreateLoop(stmt,
4340                                      loop_entry,
4341                                      body_exit,
4342                                      loop_successor,
4343                                      break_info.break_block());
4344  set_current_block(loop_exit);
4345}
4346
4347
4348void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4349  ASSERT(!HasStackOverflow());
4350  ASSERT(current_block() != NULL);
4351  ASSERT(current_block()->HasPredecessor());
4352  ASSERT(current_block() != NULL);
4353  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4354
4355  // If the condition is constant true, do not generate a branch.
4356  HBasicBlock* loop_successor = NULL;
4357  if (!stmt->cond()->ToBooleanIsTrue()) {
4358    HBasicBlock* body_entry = graph()->CreateBasicBlock();
4359    loop_successor = graph()->CreateBasicBlock();
4360    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4361    if (body_entry->HasPredecessor()) {
4362      body_entry->SetJoinId(stmt->BodyId());
4363      set_current_block(body_entry);
4364    }
4365    if (loop_successor->HasPredecessor()) {
4366      loop_successor->SetJoinId(stmt->ExitId());
4367    } else {
4368      loop_successor = NULL;
4369    }
4370  }
4371
4372  BreakAndContinueInfo break_info(stmt);
4373  if (current_block() != NULL) {
4374    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
4375  }
4376  HBasicBlock* body_exit =
4377      JoinContinue(stmt, current_block(), break_info.continue_block());
4378  HBasicBlock* loop_exit = CreateLoop(stmt,
4379                                      loop_entry,
4380                                      body_exit,
4381                                      loop_successor,
4382                                      break_info.break_block());
4383  set_current_block(loop_exit);
4384}
4385
4386
// Builds the graph for a for loop.  init, cond and next are each optional;
// a missing condition leaves the loop exit reachable only via 'break'.
void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (stmt->init() != NULL) {
    CHECK_ALIVE(Visit(stmt->init()));
  }
  // Visiting the initializer may change the current block, but must leave
  // one in place.
  ASSERT(current_block() != NULL);
  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HBasicBlock* loop_successor = NULL;
  if (stmt->cond() != NULL) {
    HBasicBlock* body_entry = graph()->CreateBasicBlock();
    loop_successor = graph()->CreateBasicBlock();
    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
    if (body_entry->HasPredecessor()) {
      body_entry->SetJoinId(stmt->BodyId());
      set_current_block(body_entry);
    }
    if (loop_successor->HasPredecessor()) {
      loop_successor->SetJoinId(stmt->ExitId());
    } else {
      // The condition can never be false; the loop is only left via 'break'.
      loop_successor = NULL;
    }
  }

  BreakAndContinueInfo break_info(stmt);
  // current_block() is NULL when the condition proved the body unreachable.
  if (current_block() != NULL) {
    CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));
  }
  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  // The 'next' expression runs after the body, on the way to the back edge.
  if (stmt->next() != NULL && body_exit != NULL) {
    set_current_block(body_exit);
    CHECK_BAILOUT(Visit(stmt->next()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());
  set_current_block(loop_exit);
}
4433
4434
// Builds the graph for a for-in loop.  Only the "fast" case is handled: the
// enumerable's enum cache is materialized once, and the loop iterates an
// index over it.  During the loop the expression stack holds (top to
// bottom): iteration index, enum length (limit), enum cache array, map,
// enumerable.
void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (!FLAG_optimize_for_in) {
    return Bailout(kForInStatementOptimizationIsDisabled);
  }

  if (stmt->for_in_type() != ForInStatement::FAST_FOR_IN) {
    return Bailout(kForInStatementIsNotFastCase);
  }

  // The 'each' target must be a stack-allocated local so Bind() below works.
  if (!stmt->each()->IsVariableProxy() ||
      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
    return Bailout(kForInStatementWithNonLocalEachVariable);
  }

  Variable* each_var = stmt->each()->AsVariableProxy()->var();

  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
  HValue* enumerable = Top();  // Leave enumerable at the top.

  HInstruction* map = Add<HForInPrepareMap>(enumerable);
  Add<HSimulate>(stmt->PrepareId());

  HInstruction* array = Add<HForInCacheArray>(
      enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);

  HInstruction* enum_length = Add<HMapEnumLength>(map);

  HInstruction* start_index = Add<HConstant>(0);

  // Set up the loop state on the expression stack (see layout above).
  Push(map);
  Push(array);
  Push(enum_length);
  Push(start_index);

  HInstruction* index_cache = Add<HForInCacheArray>(
      enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
  HForInCacheArray::cast(array)->set_index_cache(
      HForInCacheArray::cast(index_cache));

  HBasicBlock* loop_entry = BuildLoopEntry(stmt);

  HValue* index = environment()->ExpressionStackAt(0);
  HValue* limit = environment()->ExpressionStackAt(1);

  // Check that we still have more keys.
  HCompareNumericAndBranch* compare_index =
      New<HCompareNumericAndBranch>(index, limit, Token::LT);
  compare_index->set_observed_input_representation(
      Representation::Smi(), Representation::Smi());

  HBasicBlock* loop_body = graph()->CreateBasicBlock();
  HBasicBlock* loop_successor = graph()->CreateBasicBlock();

  compare_index->SetSuccessorAt(0, loop_body);
  compare_index->SetSuccessorAt(1, loop_successor);
  FinishCurrentBlock(compare_index);

  set_current_block(loop_successor);
  // Pop the loop state plus the enumerable when leaving the loop.
  Drop(5);

  set_current_block(loop_body);

  HValue* key = Add<HLoadKeyed>(
      environment()->ExpressionStackAt(2),  // Enum cache.
      environment()->ExpressionStackAt(0),  // Iteration index.
      environment()->ExpressionStackAt(0),
      FAST_ELEMENTS);

  // Check if the expected map still matches that of the enumerable.
  // If not just deoptimize.
  Add<HCheckMapValue>(environment()->ExpressionStackAt(4),
                      environment()->ExpressionStackAt(3));

  Bind(each_var, key);

  // The body runs with 5 extra values on the expression stack; break must
  // drop them.
  BreakAndContinueInfo break_info(stmt, 5);
  CHECK_BAILOUT(VisitLoopBody(stmt, loop_entry, &break_info));

  HBasicBlock* body_exit =
      JoinContinue(stmt, current_block(), break_info.continue_block());

  if (body_exit != NULL) {
    set_current_block(body_exit);

    // Increment the iteration index on the way to the back edge.
    HValue* current_index = Pop();
    Push(AddUncasted<HAdd>(current_index, graph()->GetConstant1()));
    body_exit = current_block();
  }

  HBasicBlock* loop_exit = CreateLoop(stmt,
                                      loop_entry,
                                      body_exit,
                                      loop_successor,
                                      break_info.break_block());

  set_current_block(loop_exit);
}
4536
4537
4538void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
4539  ASSERT(!HasStackOverflow());
4540  ASSERT(current_block() != NULL);
4541  ASSERT(current_block()->HasPredecessor());
4542  return Bailout(kForOfStatement);
4543}
4544
4545
4546void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
4547  ASSERT(!HasStackOverflow());
4548  ASSERT(current_block() != NULL);
4549  ASSERT(current_block()->HasPredecessor());
4550  return Bailout(kTryCatchStatement);
4551}
4552
4553
4554void HOptimizedGraphBuilder::VisitTryFinallyStatement(
4555    TryFinallyStatement* stmt) {
4556  ASSERT(!HasStackOverflow());
4557  ASSERT(current_block() != NULL);
4558  ASSERT(current_block()->HasPredecessor());
4559  return Bailout(kTryFinallyStatement);
4560}
4561
4562
4563void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
4564  ASSERT(!HasStackOverflow());
4565  ASSERT(current_block() != NULL);
4566  ASSERT(current_block()->HasPredecessor());
4567  return Bailout(kDebuggerStatement);
4568}
4569
4570
// Case clauses are translated directly inside VisitSwitchStatement, so the
// generic AST visitor must never dispatch to one.
void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
4574
4575
4576static Handle<SharedFunctionInfo> SearchSharedFunctionInfo(
4577    Code* unoptimized_code, FunctionLiteral* expr) {
4578  int start_position = expr->start_position();
4579  for (RelocIterator it(unoptimized_code); !it.done(); it.next()) {
4580    RelocInfo* rinfo = it.rinfo();
4581    if (rinfo->rmode() != RelocInfo::EMBEDDED_OBJECT) continue;
4582    Object* obj = rinfo->target_object();
4583    if (obj->IsSharedFunctionInfo()) {
4584      SharedFunctionInfo* shared = SharedFunctionInfo::cast(obj);
4585      if (shared->start_position() == start_position) {
4586        return Handle<SharedFunctionInfo>(shared);
4587      }
4588    }
4589  }
4590
4591  return Handle<SharedFunctionInfo>();
4592}
4593
4594
// Builds an HFunctionLiteral.  The SharedFunctionInfo is first recovered
// from the unoptimized code's relocation info; only if that fails is the
// literal compiled (recursively) via the Compiler.
void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Handle<SharedFunctionInfo> shared_info =
      SearchSharedFunctionInfo(current_info()->shared_info()->code(), expr);
  if (shared_info.is_null()) {
    // Not embedded in the unoptimized code; compile the literal now.
    shared_info = Compiler::BuildFunctionInfo(expr, current_info()->script());
  }
  // We also have a stack overflow if the recursive compilation did.
  if (HasStackOverflow()) return;
  HFunctionLiteral* instr =
      New<HFunctionLiteral>(shared_info, expr->pretenure());
  return ast_context()->ReturnInstruction(instr, expr->id());
}
4610
4611
4612void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
4613    NativeFunctionLiteral* expr) {
4614  ASSERT(!HasStackOverflow());
4615  ASSERT(current_block() != NULL);
4616  ASSERT(current_block()->HasPredecessor());
4617  return Bailout(kNativeFunctionLiteral);
4618}
4619
4620
4621void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
4622  ASSERT(!HasStackOverflow());
4623  ASSERT(current_block() != NULL);
4624  ASSERT(current_block()->HasPredecessor());
4625  HBasicBlock* cond_true = graph()->CreateBasicBlock();
4626  HBasicBlock* cond_false = graph()->CreateBasicBlock();
4627  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
4628
4629  // Visit the true and false subexpressions in the same AST context as the
4630  // whole expression.
4631  if (cond_true->HasPredecessor()) {
4632    cond_true->SetJoinId(expr->ThenId());
4633    set_current_block(cond_true);
4634    CHECK_BAILOUT(Visit(expr->then_expression()));
4635    cond_true = current_block();
4636  } else {
4637    cond_true = NULL;
4638  }
4639
4640  if (cond_false->HasPredecessor()) {
4641    cond_false->SetJoinId(expr->ElseId());
4642    set_current_block(cond_false);
4643    CHECK_BAILOUT(Visit(expr->else_expression()));
4644    cond_false = current_block();
4645  } else {
4646    cond_false = NULL;
4647  }
4648
4649  if (!ast_context()->IsTest()) {
4650    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
4651    set_current_block(join);
4652    if (join != NULL && !ast_context()->IsEffect()) {
4653      return ast_context()->ReturnValue(Pop());
4654    }
4655  }
4656}
4657
4658
4659HOptimizedGraphBuilder::GlobalPropertyAccess
4660    HOptimizedGraphBuilder::LookupGlobalProperty(
4661        Variable* var, LookupResult* lookup, bool is_store) {
4662  if (var->is_this() || !current_info()->has_global_object()) {
4663    return kUseGeneric;
4664  }
4665  Handle<GlobalObject> global(current_info()->global_object());
4666  global->Lookup(*var->name(), lookup);
4667  if (!lookup->IsNormal() ||
4668      (is_store && lookup->IsReadOnly()) ||
4669      lookup->holder() != *global) {
4670    return kUseGeneric;
4671  }
4672
4673  return kUseCell;
4674}
4675
4676
4677HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
4678  ASSERT(var->IsContextSlot());
4679  HValue* context = environment()->context();
4680  int length = current_info()->scope()->ContextChainLength(var->scope());
4681  while (length-- > 0) {
4682    context = Add<HOuterContext>(context);
4683  }
4684  return context;
4685}
4686
4687
// Builds the graph for a variable reference, dispatching on where the
// variable is allocated: global (cell or generic load), stack local or
// parameter, context slot, or dynamic lookup (bailout).
void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Variable* variable = expr->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      if (IsLexicalVariableMode(variable->mode())) {
        // TODO(rossberg): should this be an ASSERT?
        return Bailout(kReferenceToGlobalLexicalVariable);
      }
      // Handle known global constants like 'undefined' specially to avoid a
      // load from a global cell for them.
      Handle<Object> constant_value =
          isolate()->factory()->GlobalConstantFor(variable->name());
      if (!constant_value.is_null()) {
        HConstant* instr = New<HConstant>(constant_value);
        return ast_context()->ReturnInstruction(instr, expr->id());
      }

      LookupResult lookup(isolate());
      GlobalPropertyAccess type =
          LookupGlobalProperty(variable, &lookup, false);

      // Access-checked globals must go through the generic path.
      if (type == kUseCell &&
          current_info()->global_object()->IsAccessCheckNeeded()) {
        type = kUseGeneric;
      }

      if (type == kUseCell) {
        Handle<GlobalObject> global(current_info()->global_object());
        Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
        if (cell->type()->IsConstant()) {
          // The cell currently holds a known constant: embed it and register
          // a dependency so the code is deoptimized if the cell changes.
          cell->AddDependentCompilationInfo(top_info());
          Handle<Object> constant_object = cell->type()->AsConstant();
          if (constant_object->IsConsString()) {
            constant_object =
                FlattenGetString(Handle<String>::cast(constant_object));
          }
          HConstant* constant = New<HConstant>(constant_object);
          return ast_context()->ReturnInstruction(constant, expr->id());
        } else {
          HLoadGlobalCell* instr =
              New<HLoadGlobalCell>(cell, lookup.GetPropertyDetails());
          return ast_context()->ReturnInstruction(instr, expr->id());
        }
      } else {
        HGlobalObject* global_object = Add<HGlobalObject>();
        HLoadGlobalGeneric* instr =
            New<HLoadGlobalGeneric>(global_object,
                                    variable->name(),
                                    ast_context()->is_for_typeof());
        return ast_context()->ReturnInstruction(instr, expr->id());
      }
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      HValue* value = LookupAndMakeLive(variable);
      if (value == graph()->GetConstantHole()) {
        // The hole marks a let/const read before initialization (TDZ).
        ASSERT(IsDeclaredVariableMode(variable->mode()) &&
               variable->mode() != VAR);
        return Bailout(kReferenceToUninitializedVariable);
      }
      return ast_context()->ReturnValue(value);
    }

    case Variable::CONTEXT: {
      HValue* context = BuildContextChainWalk(variable);
      HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable);
      return ast_context()->ReturnInstruction(instr, expr->id());
    }

    case Variable::LOOKUP:
      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
  }
}
4765
4766
4767void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
4768  ASSERT(!HasStackOverflow());
4769  ASSERT(current_block() != NULL);
4770  ASSERT(current_block()->HasPredecessor());
4771  HConstant* instr = New<HConstant>(expr->value());
4772  return ast_context()->ReturnInstruction(instr, expr->id());
4773}
4774
4775
4776void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
4777  ASSERT(!HasStackOverflow());
4778  ASSERT(current_block() != NULL);
4779  ASSERT(current_block()->HasPredecessor());
4780  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4781  Handle<FixedArray> literals(closure->literals());
4782  HRegExpLiteral* instr = New<HRegExpLiteral>(literals,
4783                                              expr->pattern(),
4784                                              expr->flags(),
4785                                              expr->literal_index());
4786  return ast_context()->ReturnInstruction(instr, expr->id());
4787}
4788
4789
4790static bool CanInlinePropertyAccess(Map* type) {
4791  return type->IsJSObjectMap() &&
4792      !type->is_dictionary_map() &&
4793      !type->has_named_interceptor();
4794}
4795
4796
4797static void LookupInPrototypes(Handle<Map> map,
4798                               Handle<String> name,
4799                               LookupResult* lookup) {
4800  while (map->prototype()->IsJSObject()) {
4801    Handle<JSObject> holder(JSObject::cast(map->prototype()));
4802    map = Handle<Map>(holder->map());
4803    if (!CanInlinePropertyAccess(*map)) break;
4804    map->LookupDescriptor(*holder, *name, lookup);
4805    if (lookup->IsFound()) return;
4806  }
4807  lookup->NotFound();
4808}
4809
4810
// Tries to find a JavaScript accessor of the given name in the prototype chain
// starting at the given map. Return true iff there is one, including the
// corresponding AccessorPair plus its holder (which could be null when the
// accessor is found directly in the given map).
static bool LookupAccessorPair(Handle<Map> map,
                               Handle<String> name,
                               Handle<AccessorPair>* accessors,
                               Handle<JSObject>* holder) {
  Isolate* isolate = map->GetIsolate();
  LookupResult lookup(isolate);

  // Check for a JavaScript accessor directly in the map.
  map->LookupDescriptor(NULL, *name, &lookup);
  if (lookup.IsPropertyCallbacks()) {
    Handle<Object> callback(lookup.GetValueFromMap(*map), isolate);
    // Callbacks that are not AccessorPairs (e.g. API callbacks) cannot be
    // inlined as JavaScript accessor calls.
    if (!callback->IsAccessorPair()) return false;
    *accessors = Handle<AccessorPair>::cast(callback);
    *holder = Handle<JSObject>();  // Null handle: found on the receiver map.
    return true;
  }

  // Everything else, e.g. a field, can't be an accessor call.
  if (lookup.IsFound()) return false;

  // Check for a JavaScript accessor somewhere in the proto chain.
  LookupInPrototypes(map, name, &lookup);
  if (lookup.IsPropertyCallbacks()) {
    Handle<Object> callback(lookup.GetValue(), isolate);
    if (!callback->IsAccessorPair()) return false;
    *accessors = Handle<AccessorPair>::cast(callback);
    *holder = Handle<JSObject>(lookup.holder());
    return true;
  }

  // We haven't found a JavaScript accessor anywhere.
  return false;
}
4848
4849
4850static bool LookupSetter(Handle<Map> map,
4851                         Handle<String> name,
4852                         Handle<JSFunction>* setter,
4853                         Handle<JSObject>* holder) {
4854  Handle<AccessorPair> accessors;
4855  if (LookupAccessorPair(map, name, &accessors, holder) &&
4856      accessors->setter()->IsJSFunction()) {
4857    Handle<JSFunction> func(JSFunction::cast(accessors->setter()));
4858    CallOptimization call_optimization(func);
4859    // TODO(dcarney): temporary hack unless crankshaft can handle api calls.
4860    if (call_optimization.is_simple_api_call()) return false;
4861    *setter = func;
4862    return true;
4863  }
4864  return false;
4865}
4866
4867
// Determines whether the given array or object literal boilerplate satisfies
// all limits to be considered for fast deep-copying and computes the total
// size of all objects that are part of the graph.
//
// |max_depth| bounds recursion into nested object values; |max_properties|
// is a shared budget decremented for every element and in-object property
// encountered anywhere in the graph.
static bool IsFastLiteral(Handle<JSObject> boilerplate,
                          int max_depth,
                          int* max_properties) {
  if (boilerplate->map()->is_deprecated()) {
    // Try to bring the boilerplate up to the current map before inspecting
    // it; if migration fails the literal cannot be fast-copied.
    Handle<Object> result = JSObject::TryMigrateInstance(boilerplate);
    if (result.is_null()) return false;
  }

  ASSERT(max_depth >= 0 && *max_properties >= 0);
  if (max_depth == 0) return false;

  Isolate* isolate = boilerplate->GetIsolate();
  Handle<FixedArrayBase> elements(boilerplate->elements());
  // COW element arrays are shared, not copied, so they cost nothing here.
  if (elements->length() > 0 &&
      elements->map() != isolate->heap()->fixed_cow_array_map()) {
    if (boilerplate->HasFastObjectElements()) {
      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
      int length = elements->length();
      for (int i = 0; i < length; i++) {
        if ((*max_properties)-- == 0) return false;
        Handle<Object> value(fast_elements->get(i), isolate);
        if (value->IsJSObject()) {
          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
          if (!IsFastLiteral(value_object,
                             max_depth - 1,
                             max_properties)) {
            return false;
          }
        }
      }
    } else if (!boilerplate->HasFastDoubleElements()) {
      // Anything other than fast-object or fast-double elements disqualifies.
      return false;
    }
  }

  // Out-of-object (backing-store) properties disqualify the boilerplate;
  // only in-object properties described by the descriptors are copied.
  Handle<FixedArray> properties(boilerplate->properties());
  if (properties->length() > 0) {
    return false;
  } else {
    Handle<DescriptorArray> descriptors(
        boilerplate->map()->instance_descriptors());
    int limit = boilerplate->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < limit; i++) {
      PropertyDetails details = descriptors->GetDetails(i);
      if (details.type() != FIELD) continue;
      int index = descriptors->GetFieldIndex(i);
      if ((*max_properties)-- == 0) return false;
      Handle<Object> value(boilerplate->InObjectPropertyAt(index), isolate);
      if (value->IsJSObject()) {
        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
        if (!IsFastLiteral(value_object,
                           max_depth - 1,
                           max_properties)) {
          return false;
        }
      }
    }
  }
  return true;
}
4931
4932
4933void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
4934  ASSERT(!HasStackOverflow());
4935  ASSERT(current_block() != NULL);
4936  ASSERT(current_block()->HasPredecessor());
4937  expr->BuildConstantProperties(isolate());
4938  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
4939  HInstruction* literal;
4940
4941  // Check whether to use fast or slow deep-copying for boilerplate.
4942  int max_properties = kMaxFastLiteralProperties;
4943  Handle<Object> literals_cell(closure->literals()->get(expr->literal_index()),
4944                               isolate());
4945  Handle<AllocationSite> site;
4946  Handle<JSObject> boilerplate;
4947  if (!literals_cell->IsUndefined()) {
4948    // Retrieve the boilerplate
4949    site = Handle<AllocationSite>::cast(literals_cell);
4950    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
4951                                   isolate());
4952  }
4953
4954  if (!boilerplate.is_null() &&
4955      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
4956    AllocationSiteUsageContext usage_context(isolate(), site, false);
4957    usage_context.EnterNewScope();
4958    literal = BuildFastLiteral(boilerplate, &usage_context);
4959    usage_context.ExitScope(site, boilerplate);
4960  } else {
4961    NoObservableSideEffectsScope no_effects(this);
4962    Handle<FixedArray> closure_literals(closure->literals(), isolate());
4963    Handle<FixedArray> constant_properties = expr->constant_properties();
4964    int literal_index = expr->literal_index();
4965    int flags = expr->fast_elements()
4966        ? ObjectLiteral::kFastElements : ObjectLiteral::kNoFlags;
4967    flags |= expr->has_function()
4968        ? ObjectLiteral::kHasFunction : ObjectLiteral::kNoFlags;
4969
4970    Add<HPushArgument>(Add<HConstant>(closure_literals));
4971    Add<HPushArgument>(Add<HConstant>(literal_index));
4972    Add<HPushArgument>(Add<HConstant>(constant_properties));
4973    Add<HPushArgument>(Add<HConstant>(flags));
4974
4975    // TODO(mvstanton): Add a flag to turn off creation of any
4976    // AllocationMementos for this call: we are in crankshaft and should have
4977    // learned enough about transition behavior to stop emitting mementos.
4978    Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
4979    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
4980                                Runtime::FunctionForId(function_id),
4981                                4);
4982  }
4983
4984  // The object is expected in the bailout environment during computation
4985  // of the property values and is the value of the entire expression.
4986  Push(literal);
4987
4988  expr->CalculateEmitStore(zone());
4989
4990  for (int i = 0; i < expr->properties()->length(); i++) {
4991    ObjectLiteral::Property* property = expr->properties()->at(i);
4992    if (property->IsCompileTimeValue()) continue;
4993
4994    Literal* key = property->key();
4995    Expression* value = property->value();
4996
4997    switch (property->kind()) {
4998      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
4999        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
5000        // Fall through.
5001      case ObjectLiteral::Property::COMPUTED:
5002        if (key->value()->IsInternalizedString()) {
5003          if (property->emit_store()) {
5004            CHECK_ALIVE(VisitForValue(value));
5005            HValue* value = Pop();
5006            Handle<Map> map = property->GetReceiverType();
5007            Handle<String> name = property->key()->AsPropertyName();
5008            HInstruction* store;
5009            if (map.is_null()) {
5010              // If we don't know the monomorphic type, do a generic store.
5011              CHECK_ALIVE(store = BuildStoreNamedGeneric(literal, name, value));
5012            } else {
5013#if DEBUG
5014              Handle<JSFunction> setter;
5015              Handle<JSObject> holder;
5016              ASSERT(!LookupSetter(map, name, &setter, &holder));
5017#endif
5018              CHECK_ALIVE(store = BuildStoreNamedMonomorphic(literal,
5019                                                             name,
5020                                                             value,
5021                                                             map));
5022            }
5023            AddInstruction(store);
5024            if (store->HasObservableSideEffects()) {
5025              Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
5026            }
5027          } else {
5028            CHECK_ALIVE(VisitForEffect(value));
5029          }
5030          break;
5031        }
5032        // Fall through.
5033      case ObjectLiteral::Property::PROTOTYPE:
5034      case ObjectLiteral::Property::SETTER:
5035      case ObjectLiteral::Property::GETTER:
5036        return Bailout(kObjectLiteralWithComplexProperty);
5037      default: UNREACHABLE();
5038    }
5039  }
5040
5041  if (expr->has_function()) {
5042    // Return the result of the transformation to fast properties
5043    // instead of the original since this operation changes the map
5044    // of the object. This makes sure that the original object won't
5045    // be used by other optimized code before it is transformed
5046    // (e.g. because of code motion).
5047    HToFastProperties* result = Add<HToFastProperties>(Pop());
5048    return ast_context()->ReturnValue(result);
5049  } else {
5050    return ast_context()->ReturnValue(Pop());
5051  }
5052}
5053
5054
// Builds Hydrogen IR for an array literal. The boilerplate array is either
// deep-copied inline (fast path) or created via a runtime call (slow path);
// afterwards each non-compile-time element is evaluated and stored into the
// literal's elements backing store.
void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  expr->BuildConstantElements(isolate());
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  HInstruction* literal;

  Handle<AllocationSite> site;
  Handle<FixedArray> literals(environment()->closure()->literals(), isolate());
  bool uninitialized = false;
  Handle<Object> literals_cell(literals->get(expr->literal_index()),
                               isolate());
  Handle<JSObject> boilerplate_object;
  if (literals_cell->IsUndefined()) {
    // No boilerplate exists yet for this literal: create it and an
    // AllocationSite for it now, and cache the site in the literals array.
    uninitialized = true;
    Handle<Object> raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate(
        isolate(), literals, expr->constant_elements());
    if (raw_boilerplate.is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }

    boilerplate_object = Handle<JSObject>::cast(raw_boilerplate);
    AllocationSiteCreationContext creation_context(isolate());
    site = creation_context.EnterNewScope();
    if (JSObject::DeepWalk(boilerplate_object, &creation_context).is_null()) {
      return Bailout(kArrayBoilerplateCreationFailed);
    }
    creation_context.ExitScope(site, boilerplate_object);
    literals->set(expr->literal_index(), *site);

    if (boilerplate_object->elements()->map() ==
        isolate()->heap()->fixed_cow_array_map()) {
      isolate()->counters()->cow_arrays_created_runtime()->Increment();
    }
  } else {
    // Reuse the cached AllocationSite and the boilerplate it points to.
    ASSERT(literals_cell->IsAllocationSite());
    site = Handle<AllocationSite>::cast(literals_cell);
    boilerplate_object = Handle<JSObject>(
        JSObject::cast(site->transition_info()), isolate());
  }

  ASSERT(!boilerplate_object.is_null());
  ASSERT(site->SitePointsToLiteral());

  ElementsKind boilerplate_elements_kind =
      boilerplate_object->GetElementsKind();

  // Check whether to use fast or slow deep-copying for boilerplate.
  int max_properties = kMaxFastLiteralProperties;
  if (IsFastLiteral(boilerplate_object,
                    kMaxFastLiteralDepth,
                    &max_properties)) {
    // Fast path: materialize a deep copy of the boilerplate inline.
    AllocationSiteUsageContext usage_context(isolate(), site, false);
    usage_context.EnterNewScope();
    literal = BuildFastLiteral(boilerplate_object, &usage_context);
    usage_context.ExitScope(site, boilerplate_object);
  } else {
    NoObservableSideEffectsScope no_effects(this);
    // Boilerplate already exists and constant elements are never accessed,
    // pass an empty fixed array to the runtime function instead.
    Handle<FixedArray> constants = isolate()->factory()->empty_fixed_array();
    int literal_index = expr->literal_index();
    int flags = expr->depth() == 1
        ? ArrayLiteral::kShallowElements
        : ArrayLiteral::kNoFlags;
    flags |= ArrayLiteral::kDisableMementos;

    Add<HPushArgument>(Add<HConstant>(literals));
    Add<HPushArgument>(Add<HConstant>(literal_index));
    Add<HPushArgument>(Add<HConstant>(constants));
    Add<HPushArgument>(Add<HConstant>(flags));

    // TODO(mvstanton): Consider a flag to turn off creation of any
    // AllocationMementos for this call: we are in crankshaft and should have
    // learned enough about transition behavior to stop emitting mementos.
    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
    literal = Add<HCallRuntime>(isolate()->factory()->empty_string(),
                                Runtime::FunctionForId(function_id),
                                4);

    // De-opt if elements kind changed from boilerplate_elements_kind.
    Handle<Map> map = Handle<Map>(boilerplate_object->map(), isolate());
    literal = Add<HCheckMaps>(literal, map, top_info());
  }

  // The array is expected in the bailout environment during computation
  // of the property values and is the value of the entire expression.
  Push(literal);
  // The literal index is on the stack, too.
  Push(Add<HConstant>(expr->literal_index()));

  HInstruction* elements = NULL;

  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    CHECK_ALIVE(VisitForValue(subexpr));
    HValue* value = Pop();
    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);

    // NOTE(review): the elements array is (re)loaded inside the loop,
    // presumably because VisitForValue above can emit arbitrary code --
    // confirm before hoisting.
    elements = AddLoadElements(literal);

    HValue* key = Add<HConstant>(i);

    switch (boilerplate_elements_kind) {
      case FAST_SMI_ELEMENTS:
      case FAST_HOLEY_SMI_ELEMENTS:
      case FAST_ELEMENTS:
      case FAST_HOLEY_ELEMENTS:
      case FAST_DOUBLE_ELEMENTS:
      case FAST_HOLEY_DOUBLE_ELEMENTS: {
        HStoreKeyed* instr = Add<HStoreKeyed>(elements, key, value,
                                              boilerplate_elements_kind);
        instr->SetUninitialized(uninitialized);
        break;
      }
      default:
        UNREACHABLE();
        break;
    }

    Add<HSimulate>(expr->GetIdForElement(i));
  }

  Drop(1);  // array literal index
  return ast_context()->ReturnValue(Pop());
}
5187
5188
5189HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5190                                                Handle<Map> map) {
5191  BuildCheckHeapObject(object);
5192  return Add<HCheckMaps>(object, map, top_info());
5193}
5194
5195
// Builds a store of |value| into the field of |checked_object| described by
// |lookup| on |map|. Handles plain field stores, transition stores that add
// a new field, and (when double fields are tracked) stores that go through
// a heap-number box. May call Bailout and return NULL.
HInstruction* HOptimizedGraphBuilder::BuildStoreNamedField(
    HValue* checked_object,
    Handle<String> name,
    HValue* value,
    Handle<Map> map,
    LookupResult* lookup) {
  ASSERT(lookup->IsFound());
  // If the property does not exist yet, we have to check that it wasn't made
  // readonly or turned into a setter by some meanwhile modifications on the
  // prototype chain.
  if (!lookup->IsProperty() && map->prototype()->IsJSReceiver()) {
    Object* proto = map->prototype();
    // First check that the prototype chain isn't affected already.
    LookupResult proto_result(isolate());
    proto->Lookup(*name, &proto_result);
    if (proto_result.IsProperty()) {
      // If the inherited property could induce readonly-ness, bail out.
      if (proto_result.IsReadOnly() || !proto_result.IsCacheable()) {
        Bailout(kImproperObjectOnPrototypeChainForStore);
        return NULL;
      }
      // We only need to check up to the preexisting property.
      proto = proto_result.holder();
    } else {
      // Otherwise, find the top prototype.
      while (proto->GetPrototype(isolate())->IsJSObject()) {
        proto = proto->GetPrototype(isolate());
      }
      ASSERT(proto->GetPrototype(isolate())->IsNull());
    }
    ASSERT(proto->IsJSObject());
    // Guard the maps of the whole chain up to (and including) the found
    // holder so the assumption above stays valid at runtime.
    BuildCheckPrototypeMaps(
        Handle<JSObject>(JSObject::cast(map->prototype())),
        Handle<JSObject>(JSObject::cast(proto)));
  }

  HObjectAccess field_access = HObjectAccess::ForField(map, lookup, name);
  bool transition_to_field = lookup->IsTransitionToField();

  HStoreNamedField *instr;
  if (FLAG_track_double_fields && field_access.representation().IsDouble()) {
    // Double fields are stored boxed in a HeapNumber; access the box slot
    // with a tagged representation.
    HObjectAccess heap_number_access =
        field_access.WithRepresentation(Representation::Tagged());
    if (transition_to_field) {
      // The store requires a mutable HeapNumber to be allocated.
      NoObservableSideEffectsScope no_side_effects(this);
      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
      HInstruction* heap_number = Add<HAllocate>(heap_number_size,
          HType::HeapNumber(), isolate()->heap()->GetPretenureMode(),
          HEAP_NUMBER_TYPE);
      AddStoreMapConstant(heap_number, isolate()->factory()->heap_number_map());
      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
                            value);
      // Store the freshly allocated box into the (tagged) field slot.
      instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                    heap_number_access,
                                    heap_number);
    } else {
      // Already holds a HeapNumber; load the box and write its value field.
      HInstruction* heap_number = Add<HLoadNamedField>(checked_object,
                                                       heap_number_access);
      heap_number->set_type(HType::HeapNumber());
      instr = New<HStoreNamedField>(heap_number,
                                    HObjectAccess::ForHeapNumberValue(),
                                    value);
    }
  } else {
    // This is a normal store.
    instr = New<HStoreNamedField>(checked_object->ActualValue(),
                                  field_access,
                                  value);
  }

  if (transition_to_field) {
    // Attach the target map so the store also performs the map transition.
    Handle<Map> transition(lookup->GetTransitionTarget());
    HConstant* transition_constant = Add<HConstant>(transition);
    instr->SetTransition(transition_constant, top_info());
    // TODO(fschneider): Record the new map type of the object in the IR to
    // enable elimination of redundant checks after the transition store.
    instr->SetGVNFlag(kChangesMaps);
  }
  return instr;
}
5278
5279
5280HInstruction* HOptimizedGraphBuilder::BuildStoreNamedGeneric(
5281    HValue* object,
5282    Handle<String> name,
5283    HValue* value) {
5284  return New<HStoreNamedGeneric>(
5285                         object,
5286                         name,
5287                         value,
5288                         function_strict_mode_flag());
5289}
5290
5291
5292// Sets the lookup result and returns true if the load/store can be inlined.
5293static bool ComputeStoreField(Handle<Map> type,
5294                              Handle<String> name,
5295                              LookupResult* lookup,
5296                              bool lookup_transition = true) {
5297  ASSERT(!type->is_observed());
5298  if (!CanInlinePropertyAccess(*type)) {
5299    lookup->NotFound();
5300    return false;
5301  }
5302  // If we directly find a field, the access can be inlined.
5303  type->LookupDescriptor(NULL, *name, lookup);
5304  if (lookup->IsField()) return true;
5305
5306  if (!lookup_transition) return false;
5307
5308  type->LookupTransition(NULL, *name, lookup);
5309  return lookup->IsTransitionToField() &&
5310      (type->unused_property_fields() > 0);
5311}
5312
5313
5314HInstruction* HOptimizedGraphBuilder::BuildStoreNamedMonomorphic(
5315    HValue* object,
5316    Handle<String> name,
5317    HValue* value,
5318    Handle<Map> map) {
5319  // Handle a store to a known field.
5320  LookupResult lookup(isolate());
5321  if (ComputeStoreField(map, name, &lookup)) {
5322    HCheckMaps* checked_object = AddCheckMap(object, map);
5323    return BuildStoreNamedField(checked_object, name, value, map, &lookup);
5324  }
5325
5326  // No luck, do a generic store.
5327  return BuildStoreNamedGeneric(object, name, value);
5328}
5329
5330
// Returns true when this access can be merged with |info| into a single
// load (same kind of result and, for fields, same layout). May generalize
// |info|'s field representation as a side effect when both are field loads.
bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatibleForLoad(
    PropertyAccessInfo* info) {
  if (!CanInlinePropertyAccess(*map_)) return false;

  if (!LookupDescriptor()) return false;

  if (!lookup_.IsFound()) {
    // Not found on this map: compatible only if the other access also
    // misses (or finds it on a prototype holder) and both maps share the
    // same prototype.
    return (!info->lookup_.IsFound() || info->has_holder()) &&
        map_->prototype() == info->map_->prototype();
  }

  // Mismatch if the other access info found the property in the prototype
  // chain.
  if (info->has_holder()) return false;

  if (lookup_.IsPropertyCallbacks()) {
    // Accessor loads are only compatible for the identical accessor.
    return accessor_.is_identical_to(info->accessor_);
  }

  if (lookup_.IsConstant()) {
    // Constant loads are only compatible for the identical constant.
    return constant_.is_identical_to(info->constant_);
  }

  ASSERT(lookup_.IsField());
  if (!info->lookup_.IsField()) return false;

  // Field loads must agree on representation, offset, and location
  // (in-object vs. out-of-object).
  Representation r = access_.representation();
  if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
  if (info->access_.offset() != access_.offset()) return false;
  if (info->access_.IsInobject() != access_.IsInobject()) return false;
  info->GeneralizeRepresentation(r);
  return true;
}
5364
5365
5366bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
5367  map_->LookupDescriptor(NULL, *name_, &lookup_);
5368  return LoadResult(map_);
5369}
5370
5371
5372bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
5373  if (lookup_.IsField()) {
5374    access_ = HObjectAccess::ForField(map, &lookup_, name_);
5375  } else if (lookup_.IsPropertyCallbacks()) {
5376    Handle<Object> callback(lookup_.GetValueFromMap(*map), isolate());
5377    if (!callback->IsAccessorPair()) return false;
5378    Object* getter = Handle<AccessorPair>::cast(callback)->getter();
5379    if (!getter->IsJSFunction()) return false;
5380    Handle<JSFunction> accessor = handle(JSFunction::cast(getter));
5381    CallOptimization call_optimization(accessor);
5382    // TODO(dcarney): temporary hack unless crankshaft can handle api calls.
5383    if (call_optimization.is_simple_api_call()) return false;
5384    accessor_ = accessor;
5385  } else if (lookup_.IsConstant()) {
5386    constant_ = handle(lookup_.GetConstantFromMap(*map), isolate());
5387  }
5388
5389  return true;
5390}
5391
5392
// Walks the prototype chain of |map_| looking for |name_|. Records the
// first JSObject prototype inspected at each step in holder_ and the
// lookup outcome in lookup_. Returns false when a map on the chain cannot
// be inlined into (or the found property cannot be handled); returns true
// otherwise, including when the property was not found at all.
bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
  Handle<Map> map = map_;
  while (map->prototype()->IsJSObject()) {
    holder_ = handle(JSObject::cast(map->prototype()));
    if (holder_->map()->is_deprecated()) {
      // Try to bring the holder up to date before inspecting its map.
      JSObject::TryMigrateInstance(holder_);
    }
    map = Handle<Map>(holder_->map());
    if (!CanInlinePropertyAccess(*map)) {
      lookup_.NotFound();
      return false;
    }
    map->LookupDescriptor(*holder_, *name_, &lookup_);
    if (lookup_.IsFound()) return LoadResult(map);
  }
  // Reached the end of the chain without finding the property.
  lookup_.NotFound();
  return true;
}
5411
5412
5413bool HOptimizedGraphBuilder::PropertyAccessInfo::CanLoadMonomorphic() {
5414  if (!CanInlinePropertyAccess(*map_)) return IsStringLength();
5415  if (IsJSObjectFieldAccessor()) return true;
5416  if (!LookupDescriptor()) return false;
5417  if (lookup_.IsFound()) return true;
5418  return LookupInPrototypes();
5419}
5420
5421
// Returns true when a load over the whole feedback set |types| can be
// compiled as if it were monomorphic for the first map. Special-cases
// string length, array length and internal JSObject field accessors;
// otherwise every other map's access must be compatible with this one.
bool HOptimizedGraphBuilder::PropertyAccessInfo::CanLoadAsMonomorphic(
    SmallMapList* types) {
  ASSERT(map_.is_identical_to(types->first()));
  if (!CanLoadMonomorphic()) return false;
  if (types->length() > kMaxLoadPolymorphism) return false;

  if (IsStringLength()) {
    // All of the remaining maps must be string maps as well.
    for (int i = 1; i < types->length(); ++i) {
      if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
    }
    return true;
  }

  if (IsArrayLength()) {
    // All remaining maps must be JSArray maps with the same fast/slow
    // elements-kind classification.
    bool is_fast = IsFastElementsKind(map_->elements_kind());
    for (int i = 1; i < types->length(); ++i) {
      Handle<Map> test_map = types->at(i);
      if (test_map->instance_type() != JS_ARRAY_TYPE) return false;
      if (IsFastElementsKind(test_map->elements_kind()) != is_fast) {
        return false;
      }
    }
    return true;
  }

  if (IsJSObjectFieldAccessor()) {
    // All remaining maps must have the same instance type.
    InstanceType instance_type = map_->instance_type();
    for (int i = 1; i < types->length(); ++i) {
      if (types->at(i)->instance_type() != instance_type) return false;
    }
    return true;
  }

  // General case: each remaining map's access must be compatible with ours.
  for (int i = 1; i < types->length(); ++i) {
    PropertyAccessInfo test_info(isolate(), types->at(i), name_);
    if (!test_info.IsCompatibleForLoad(this)) return false;
  }

  return true;
}
5462
5463
// Builds the load described by |info| on |checked_object|. Returns the
// (possibly not yet linked) load instruction, or NULL when a getter was
// inlined via TryInlineGetter (the inlined body then produces the value).
HInstruction* HOptimizedGraphBuilder::BuildLoadMonomorphic(
    PropertyAccessInfo* info,
    HValue* object,
    HInstruction* checked_object,
    BailoutId ast_id,
    BailoutId return_id,
    bool can_inline_accessor) {

  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
  if (info->GetJSObjectFieldAccess(&access)) {
    // Internal JSObject field; load it directly.
    return New<HLoadNamedField>(checked_object, access);
  }

  HValue* checked_holder = checked_object;
  if (info->has_holder()) {
    // Property lives on the prototype chain; guard the chain's maps up to
    // the holder.
    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
  }

  // Property is absent everywhere: the load yields undefined.
  if (!info->lookup()->IsFound()) return graph()->GetConstantUndefined();

  if (info->lookup()->IsField()) {
    return BuildLoadNamedField(checked_holder, info->access());
  }

  if (info->lookup()->IsPropertyCallbacks()) {
    Push(checked_object);
    if (FLAG_inline_accessors &&
        can_inline_accessor &&
        TryInlineGetter(info->accessor(), ast_id, return_id)) {
      return NULL;
    }
    // Could not inline the getter; call it as a known constant function
    // with the receiver as the single argument.
    Add<HPushArgument>(Pop());
    return New<HCallConstantFunction>(info->accessor(), 1);
  }

  ASSERT(info->lookup()->IsConstant());
  return New<HConstant>(info->constant());
}
5503
5504
// Builds a polymorphic named load: a chain of map compares with one arm
// per map that can be loaded monomorphically, all joined at a common
// block. The remaining maps fall back to a generic load, or to a hard
// deoptimization when every feedback map was handled.
void HOptimizedGraphBuilder::HandlePolymorphicLoadNamedField(
    BailoutId ast_id,
    BailoutId return_id,
    HValue* object,
    SmallMapList* types,
    Handle<String> name) {
  // Something did not match; must use a polymorphic load.
  int count = 0;
  HBasicBlock* join = NULL;
  for (int i = 0; i < types->length() && count < kMaxLoadPolymorphism; ++i) {
    PropertyAccessInfo info(isolate(), types->at(i), name);
    if (info.CanLoadMonomorphic()) {
      if (count == 0) {
        // First handled map: emit the shared heap-object check and create
        // the join block all arms flow into.
        BuildCheckHeapObject(object);
        join = graph()->CreateBasicBlock();
      }
      ++count;
      HBasicBlock* if_true = graph()->CreateBasicBlock();
      HBasicBlock* if_false = graph()->CreateBasicBlock();
      HCompareMap* compare = New<HCompareMap>(
          object, info.map(),  if_true, if_false);
      FinishCurrentBlock(compare);

      set_current_block(if_true);

      HInstruction* load = BuildLoadMonomorphic(
          &info, object, compare, ast_id, return_id, FLAG_polymorphic_inlining);
      if (load == NULL) {
        // NULL means a getter was inlined -- unless inlining aborted due
        // to stack overflow, in which case we must unwind.
        if (HasStackOverflow()) return;
      } else {
        if (!load->IsLinked()) {
          AddInstruction(load);
        }
        if (!ast_context()->IsEffect()) Push(load);
      }

      if (current_block() != NULL) Goto(join);
      set_current_block(if_false);
    }
  }

  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen.  Otherwise
  // use a generic IC.
  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
    // Because the deopt may be the only path in the polymorphic load, make sure
    // that the environment stack matches the depth on deopt that it otherwise
    // would have had after a successful load.
    if (!ast_context()->IsEffect()) Push(graph()->GetConstant0());
    FinishExitWithHardDeoptimization("Unknown map in polymorphic load", join);
  } else {
    HInstruction* load = Add<HLoadNamedGeneric>(object, name);
    if (!ast_context()->IsEffect()) Push(load);

    if (join != NULL) {
      Goto(join);
    } else {
      // No map was handled monomorphically; finish without a join block.
      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
      return;
    }
  }

  ASSERT(join != NULL);
  join->SetJoinId(ast_id);
  set_current_block(join);
  if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
}
5573
5574
// Tries to compile a polymorphic named store as one monomorphic store:
// possible when every map in |types| resolves (ignoring transitions) to
// the same field layout -- same representation, offset and in-objectness.
// Returns true when the store was emitted, false to fall back to the
// genuinely polymorphic path.
bool HOptimizedGraphBuilder::TryStorePolymorphicAsMonomorphic(
    BailoutId assignment_id,
    HValue* object,
    HValue* value,
    SmallMapList* types,
    Handle<String> name) {
  // Use monomorphic store if property lookup results in the same field index
  // for all maps. Requires special map check on the set of all handled maps.
  if (types->length() > kMaxStorePolymorphism) return false;

  LookupResult lookup(isolate());
  int count;
  Representation representation = Representation::None();
  HObjectAccess access = HObjectAccess::ForMap();  // initial value unused.
  for (count = 0; count < types->length(); ++count) {
    Handle<Map> map = types->at(count);
    // Pass false to ignore transitions.
    if (!ComputeStoreField(map, name, &lookup, false)) break;
    ASSERT(!map->is_observed());

    HObjectAccess new_access = HObjectAccess::ForField(map, &lookup, name);
    Representation new_representation = new_access.representation();

    if (count == 0) {
      // First time through the loop; set access and representation.
      access = new_access;
      representation = new_representation;
    } else if (!representation.IsCompatibleForStore(new_representation)) {
      // Representations did not match.
      break;
    } else if (access.offset() != new_access.offset()) {
      // Offsets did not match.
      break;
    } else if (access.IsInobject() != new_access.IsInobject()) {
      // In-objectness did not match.
      break;
    }
  }

  // Breaking out of the loop above means at least one map did not match.
  if (count != types->length()) return false;

  // Everything matched; can use monomorphic store.
  BuildCheckHeapObject(object);
  HCheckMaps* checked_object = Add<HCheckMaps>(object, types);
  HInstruction* store;
  CHECK_ALIVE_OR_RETURN(
      store = BuildStoreNamedField(
          checked_object, name, value, types->at(count - 1), &lookup),
      true);
  // Keep the stored value on the stack across the simulate in value
  // contexts; effect contexts need no result.
  if (!ast_context()->IsEffect()) Push(value);
  AddInstruction(store);
  Add<HSimulate>(assignment_id);
  if (!ast_context()->IsEffect()) Drop(1);
  ast_context()->ReturnValue(value);
  return true;
}
5631
5632
// Builds a polymorphic named store. First tries to emit it as a single
// monomorphic store; otherwise emits a chain of map compares with one
// store arm per inlinable map, falling back to a generic store (or a hard
// deoptimization when every feedback map was handled).
void HOptimizedGraphBuilder::HandlePolymorphicStoreNamedField(
    BailoutId assignment_id,
    HValue* object,
    HValue* value,
    SmallMapList* types,
    Handle<String> name) {
  if (TryStorePolymorphicAsMonomorphic(
          assignment_id, object, value, types, name)) {
    return;
  }

  // TODO(ager): We should recognize when the prototype chains for different
  // maps are identical. In that case we can avoid repeatedly generating the
  // same prototype map checks.
  int count = 0;
  HBasicBlock* join = NULL;
  for (int i = 0; i < types->length() && count < kMaxStorePolymorphism; ++i) {
    Handle<Map> map = types->at(i);
    LookupResult lookup(isolate());
    if (ComputeStoreField(map, name, &lookup)) {
      if (count == 0) {
        // First handled map: emit the shared heap-object check and create
        // the join block all arms flow into.
        BuildCheckHeapObject(object);
        join = graph()->CreateBasicBlock();
      }
      ++count;
      HBasicBlock* if_true = graph()->CreateBasicBlock();
      HBasicBlock* if_false = graph()->CreateBasicBlock();
      HCompareMap* compare = New<HCompareMap>(object, map,  if_true, if_false);
      FinishCurrentBlock(compare);

      set_current_block(if_true);
      HInstruction* instr;
      CHECK_ALIVE(instr = BuildStoreNamedField(
          compare, name, value, map, &lookup));
      // Goto will add the HSimulate for the store.
      AddInstruction(instr);
      if (!ast_context()->IsEffect()) Push(value);
      Goto(join);

      set_current_block(if_false);
    }
  }

  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen.  Otherwise
  // use a generic IC.
  if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
    FinishExitWithHardDeoptimization("Unknown map in polymorphic store", join);
  } else {
    HInstruction* instr = BuildStoreNamedGeneric(object, name, value);
    AddInstruction(instr);

    if (join != NULL) {
      if (!ast_context()->IsEffect()) {
        Push(value);
      }
      Goto(join);
    } else {
      // No map was handled monomorphically; finish without a join block.
      // The HSimulate for the store should not see the stored value in
      // effect contexts (it is not materialized at expr->id() in the
      // unoptimized code).
      if (instr->HasObservableSideEffects()) {
        if (ast_context()->IsEffect()) {
          Add<HSimulate>(assignment_id, REMOVABLE_SIMULATE);
        } else {
          Push(value);
          Add<HSimulate>(assignment_id, REMOVABLE_SIMULATE);
          Drop(1);
        }
      }
      return ast_context()->ReturnValue(value);
    }
  }

  ASSERT(join != NULL);
  join->SetJoinId(assignment_id);
  set_current_block(join);
  if (!ast_context()->IsEffect()) {
    ast_context()->ReturnValue(Pop());
  }
}
5714
5715
5716static bool ComputeReceiverTypes(Expression* expr,
5717                                 HValue* receiver,
5718                                 SmallMapList** t) {
5719  SmallMapList* types = expr->GetReceiverTypes();
5720  *t = types;
5721  bool monomorphic = expr->IsMonomorphic();
5722  if (types != NULL && receiver->HasMonomorphicJSObjectType()) {
5723    Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
5724    types->FilterForPossibleTransitions(root_map);
5725    monomorphic = types->length() == 1;
5726  }
5727  return monomorphic && CanInlinePropertyAccess(*types->first());
5728}
5729
5730
// Builds the store for an assignment to a property. Expects the value on
// top of the expression stack, below it the key (keyed stores only) and
// the receiver object. Dispatches to keyed, monomorphic (possibly with an
// inlined setter), polymorphic, or generic named stores.
void HOptimizedGraphBuilder::BuildStore(Expression* expr,
                                        Property* prop,
                                        BailoutId ast_id,
                                        BailoutId return_id,
                                        bool is_uninitialized) {
  HValue* value = environment()->ExpressionStackAt(0);

  if (!prop->key()->IsPropertyName()) {
    // Keyed store.
    HValue* key = environment()->ExpressionStackAt(1);
    HValue* object = environment()->ExpressionStackAt(2);
    bool has_side_effects = false;
    HandleKeyedElementAccess(object, key, value, expr,
                             true,  // is_store
                             &has_side_effects);
    // Drop object, key and value; the stored value is the result.
    Drop(3);
    Push(value);
    Add<HSimulate>(return_id, REMOVABLE_SIMULATE);
    return ast_context()->ReturnValue(Pop());
  }

  // Named store.
  HValue* object = environment()->ExpressionStackAt(1);

  if (is_uninitialized) {
    // No type feedback has been collected yet; deoptimize softly so that
    // the unoptimized code can gather it.
    Add<HDeoptimize>("Insufficient type feedback for property assignment",
                     Deoptimizer::SOFT);
  }

  Literal* key = prop->key()->AsLiteral();
  Handle<String> name = Handle<String>::cast(key->value());
  ASSERT(!name.is_null());

  HInstruction* instr = NULL;

  SmallMapList* types;
  bool monomorphic = ComputeReceiverTypes(expr, object, &types);

  if (monomorphic) {
    Handle<Map> map = types->first();
    Handle<JSFunction> setter;
    Handle<JSObject> holder;
    if (LookupSetter(map, name, &setter, &holder)) {
      // The property is backed by a setter: inline it when possible,
      // otherwise call it as a known constant function.
      AddCheckConstantFunction(holder, object, map);
      if (FLAG_inline_accessors &&
          TryInlineSetter(setter, ast_id, return_id, value)) {
        return;
      }
      Drop(2);
      Add<HPushArgument>(object);
      Add<HPushArgument>(value);
      instr = New<HCallConstantFunction>(setter, 2);
    } else {
      Drop(2);
      CHECK_ALIVE(instr = BuildStoreNamedMonomorphic(object,
                                                     name,
                                                     value,
                                                     map));
    }
  } else if (types != NULL && types->length() > 1) {
    Drop(2);
    return HandlePolymorphicStoreNamedField(ast_id, object, value, types, name);
  } else {
    Drop(2);
    instr = BuildStoreNamedGeneric(object, name, value);
  }

  // Keep the stored value on the stack across the simulate in value
  // contexts; effect contexts need no result.
  if (!ast_context()->IsEffect()) Push(value);
  AddInstruction(instr);
  if (instr->HasObservableSideEffects()) {
    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
  }
  if (!ast_context()->IsEffect()) Drop(1);
  return ast_context()->ReturnValue(value);
}
5806
5807
5808void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
5809  Property* prop = expr->target()->AsProperty();
5810  ASSERT(prop != NULL);
5811  CHECK_ALIVE(VisitForValue(prop->obj()));
5812  if (!prop->key()->IsPropertyName()) {
5813    CHECK_ALIVE(VisitForValue(prop->key()));
5814  }
5815  CHECK_ALIVE(VisitForValue(expr->value()));
5816  BuildStore(expr, prop, expr->id(),
5817             expr->AssignmentId(), expr->IsUninitialized());
5818}
5819
5820
5821// Because not every expression has a position and there is not common
5822// superclass of Assignment and CountOperation, we cannot just pass the
5823// owning expression instead of position and ast_id separately.
5824void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(
5825    Variable* var,
5826    HValue* value,
5827    BailoutId ast_id) {
5828  LookupResult lookup(isolate());
5829  GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, true);
5830  if (type == kUseCell) {
5831    Handle<GlobalObject> global(current_info()->global_object());
5832    Handle<PropertyCell> cell(global->GetPropertyCell(&lookup));
5833    if (cell->type()->IsConstant()) {
5834      IfBuilder builder(this);
5835      HValue* constant = Add<HConstant>(cell->type()->AsConstant());
5836      if (cell->type()->AsConstant()->IsNumber()) {
5837        builder.If<HCompareNumericAndBranch>(value, constant, Token::EQ);
5838      } else {
5839        builder.If<HCompareObjectEqAndBranch>(value, constant);
5840      }
5841      builder.Then();
5842      builder.Else();
5843      Add<HDeoptimize>("Constant global variable assignment",
5844                       Deoptimizer::EAGER);
5845      builder.End();
5846    }
5847    HInstruction* instr =
5848        Add<HStoreGlobalCell>(value, cell, lookup.GetPropertyDetails());
5849    if (instr->HasObservableSideEffects()) {
5850      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
5851    }
5852  } else {
5853    HGlobalObject* global_object = Add<HGlobalObject>();
5854    HStoreGlobalGeneric* instr =
5855        Add<HStoreGlobalGeneric>(global_object, var->name(),
5856                                 value, function_strict_mode_flag());
5857    USE(instr);
5858    ASSERT(instr->HasObservableSideEffects());
5859    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
5860  }
5861}
5862
5863
5864void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
5865  Expression* target = expr->target();
5866  VariableProxy* proxy = target->AsVariableProxy();
5867  Property* prop = target->AsProperty();
5868  ASSERT(proxy == NULL || prop == NULL);
5869
5870  // We have a second position recorded in the FullCodeGenerator to have
5871  // type feedback for the binary operation.
5872  BinaryOperation* operation = expr->binary_operation();
5873
5874  if (proxy != NULL) {
5875    Variable* var = proxy->var();
5876    if (var->mode() == LET)  {
5877      return Bailout(kUnsupportedLetCompoundAssignment);
5878    }
5879
5880    CHECK_ALIVE(VisitForValue(operation));
5881
5882    switch (var->location()) {
5883      case Variable::UNALLOCATED:
5884        HandleGlobalVariableAssignment(var,
5885                                       Top(),
5886                                       expr->AssignmentId());
5887        break;
5888
5889      case Variable::PARAMETER:
5890      case Variable::LOCAL:
5891        if (var->mode() == CONST)  {
5892          return Bailout(kUnsupportedConstCompoundAssignment);
5893        }
5894        BindIfLive(var, Top());
5895        break;
5896
5897      case Variable::CONTEXT: {
5898        // Bail out if we try to mutate a parameter value in a function
5899        // using the arguments object.  We do not (yet) correctly handle the
5900        // arguments property of the function.
5901        if (current_info()->scope()->arguments() != NULL) {
5902          // Parameters will be allocated to context slots.  We have no
5903          // direct way to detect that the variable is a parameter so we do
5904          // a linear search of the parameter variables.
5905          int count = current_info()->scope()->num_parameters();
5906          for (int i = 0; i < count; ++i) {
5907            if (var == current_info()->scope()->parameter(i)) {
5908              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
5909            }
5910          }
5911        }
5912
5913        HStoreContextSlot::Mode mode;
5914
5915        switch (var->mode()) {
5916          case LET:
5917            mode = HStoreContextSlot::kCheckDeoptimize;
5918            break;
5919          case CONST:
5920            return ast_context()->ReturnValue(Pop());
5921          case CONST_HARMONY:
5922            // This case is checked statically so no need to
5923            // perform checks here
5924            UNREACHABLE();
5925          default:
5926            mode = HStoreContextSlot::kNoCheck;
5927        }
5928
5929        HValue* context = BuildContextChainWalk(var);
5930        HStoreContextSlot* instr = Add<HStoreContextSlot>(
5931            context, var->index(), mode, Top());
5932        if (instr->HasObservableSideEffects()) {
5933          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
5934        }
5935        break;
5936      }
5937
5938      case Variable::LOOKUP:
5939        return Bailout(kCompoundAssignmentToLookupSlot);
5940    }
5941    return ast_context()->ReturnValue(Pop());
5942
5943  } else if (prop != NULL) {
5944    CHECK_ALIVE(VisitForValue(prop->obj()));
5945    HValue* object = Top();
5946    HValue* key = NULL;
5947    if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
5948        prop->IsStringAccess()) {
5949      CHECK_ALIVE(VisitForValue(prop->key()));
5950      key = Top();
5951    }
5952
5953    CHECK_ALIVE(PushLoad(prop, object, key));
5954
5955    CHECK_ALIVE(VisitForValue(expr->value()));
5956    HValue* right = Pop();
5957    HValue* left = Pop();
5958
5959    Push(BuildBinaryOperation(operation, left, right));
5960    BuildStore(expr, prop, expr->id(),
5961               expr->AssignmentId(), expr->IsUninitialized());
5962  } else {
5963    return Bailout(kInvalidLhsInCompoundAssignment);
5964  }
5965}
5966
5967
void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  // The target is either a variable proxy or a property, never both.
  ASSERT(proxy == NULL || prop == NULL);

  if (expr->is_compound()) {
    HandleCompoundAssignment(expr);
    return;
  }

  if (prop != NULL) {
    HandlePropertyAssignment(expr);
  } else if (proxy != NULL) {
    Variable* var = proxy->var();

    if (var->mode() == CONST) {
      if (expr->op() != Token::INIT_CONST) {
        // Non-initializing assignments to legacy const are ignored; the
        // right-hand side is still evaluated for side effects and becomes
        // the value of the expression.
        CHECK_ALIVE(VisitForValue(expr->value()));
        return ast_context()->ReturnValue(Pop());
      }

      if (var->IsStackAllocated()) {
        // We insert a use of the old value to detect unsupported uses of const
        // variables (e.g. initialization inside a loop).
        HValue* old_value = environment()->Lookup(var);
        Add<HUseConst>(old_value);
      }
    } else if (var->mode() == CONST_HARMONY) {
      if (expr->op() != Token::INIT_CONST_HARMONY) {
        return Bailout(kNonInitializerAssignmentToConst);
      }
    }

    if (proxy->IsArguments()) return Bailout(kAssignmentToArguments);

    // Handle the assignment.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        CHECK_ALIVE(VisitForValue(expr->value()));
        HandleGlobalVariableAssignment(var,
                                       Top(),
                                       expr->AssignmentId());
        return ast_context()->ReturnValue(Pop());

      case Variable::PARAMETER:
      case Variable::LOCAL: {
        // Perform an initialization check for let declared variables
        // or parameters.
        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
          HValue* env_value = environment()->Lookup(var);
          if (env_value == graph()->GetConstantHole()) {
            return Bailout(kAssignmentToLetVariableBeforeInitialization);
          }
        }
        // We do not allow the arguments object to occur in a context where it
        // may escape, but assignments to stack-allocated locals are
        // permitted.
        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
        HValue* value = Pop();
        BindIfLive(var, value);
        return ast_context()->ReturnValue(value);
      }

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function using
        // the arguments object.  We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots.  We have no direct way
          // to detect that the variable is a parameter.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        CHECK_ALIVE(VisitForValue(expr->value()));
        // Select a hole-check mode based on the token: plain assignments to
        // let bindings must deopt when the slot still holds the hole,
        // initializing stores never need a check.
        HStoreContextSlot::Mode mode;
        if (expr->op() == Token::ASSIGN) {
          switch (var->mode()) {
            case LET:
              mode = HStoreContextSlot::kCheckDeoptimize;
              break;
            case CONST:
              return ast_context()->ReturnValue(Pop());
            case CONST_HARMONY:
              // This case is checked statically so no need to
              // perform checks here
              UNREACHABLE();
            default:
              mode = HStoreContextSlot::kNoCheck;
          }
        } else if (expr->op() == Token::INIT_VAR ||
                   expr->op() == Token::INIT_LET ||
                   expr->op() == Token::INIT_CONST_HARMONY) {
          mode = HStoreContextSlot::kNoCheck;
        } else {
          ASSERT(expr->op() == Token::INIT_CONST);

          mode = HStoreContextSlot::kCheckIgnoreAssignment;
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot* instr = Add<HStoreContextSlot>(
            context, var->index(), mode, Top());
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        return ast_context()->ReturnValue(Pop());
      }

      case Variable::LOOKUP:
        return Bailout(kAssignmentToLOOKUPVariable);
    }
  } else {
    return Bailout(kInvalidLeftHandSideInAssignment);
  }
}
6091
6092
void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
  // Generators are not optimized, so we should never get here.  Reaching
  // this visitor indicates a compiler bug upstream.
  UNREACHABLE();
}
6097
6098
6099void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
6100  ASSERT(!HasStackOverflow());
6101  ASSERT(current_block() != NULL);
6102  ASSERT(current_block()->HasPredecessor());
6103  // We don't optimize functions with invalid left-hand sides in
6104  // assignments, count operations, or for-in.  Consequently throw can
6105  // currently only occur in an effect context.
6106  ASSERT(ast_context()->IsEffect());
6107  CHECK_ALIVE(VisitForValue(expr->exception()));
6108
6109  HValue* value = environment()->Pop();
6110  if (!FLAG_emit_opt_code_positions) SetSourcePosition(expr->position());
6111  Add<HThrow>(value);
6112  Add<HSimulate>(expr->id());
6113
6114  // If the throw definitely exits the function, we can finish with a dummy
6115  // control flow at this point.  This is not the case if the throw is inside
6116  // an inlined function which may be replaced.
6117  if (call_context() == NULL) {
6118    FinishExitCurrentBlock(New<HAbnormalExit>());
6119  }
6120}
6121
6122
6123HLoadNamedField* HGraphBuilder::BuildLoadNamedField(HValue* object,
6124                                                    HObjectAccess access) {
6125  if (FLAG_track_double_fields && access.representation().IsDouble()) {
6126    // load the heap number
6127    HLoadNamedField* heap_number = Add<HLoadNamedField>(
6128        object, access.WithRepresentation(Representation::Tagged()));
6129    heap_number->set_type(HType::HeapNumber());
6130    // load the double value from it
6131    return New<HLoadNamedField>(
6132        heap_number, HObjectAccess::ForHeapNumberValue());
6133  }
6134  return New<HLoadNamedField>(object, access);
6135}
6136
6137
// Convenience wrapper around BuildLoadNamedField that also inserts the
// resulting load into the current basic block.
HInstruction* HGraphBuilder::AddLoadNamedField(HValue* object,
                                               HObjectAccess access) {
  return AddInstruction(BuildLoadNamedField(object, access));
}
6142
6143
6144HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* object,
6145                                                   HValue* checked_string) {
6146  if (FLAG_fold_constants && object->IsConstant()) {
6147    HConstant* constant = HConstant::cast(object);
6148    if (constant->HasStringValue()) {
6149      return New<HConstant>(constant->StringValue()->length());
6150    }
6151  }
6152  return BuildLoadNamedField(checked_string, HObjectAccess::ForStringLength());
6153}
6154
6155
// Builds a generic (IC-based) named load.  If the property access has no
// recorded type feedback, a soft deopt is inserted first so unoptimized
// code gets a chance to collect feedback.
HInstruction* HOptimizedGraphBuilder::BuildLoadNamedGeneric(
    HValue* object,
    Handle<String> name,
    Property* expr) {
  if (expr->IsUninitialized()) {
    Add<HDeoptimize>("Insufficient type feedback for generic named load",
                     Deoptimizer::SOFT);
  }
  return New<HLoadNamedGeneric>(object, name);
}
6166
6167
6168
// Builds a generic (IC-based) keyed load; no map checks are emitted.
HInstruction* HOptimizedGraphBuilder::BuildLoadKeyedGeneric(HValue* object,
                                                            HValue* key) {
  return New<HLoadKeyedGeneric>(object, key);
}
6173
6174
6175LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
6176  // Loads from a "stock" fast holey double arrays can elide the hole check.
6177  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
6178  if (*map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS) &&
6179      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
6180    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
6181    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
6182    BuildCheckPrototypeMaps(prototype, object_prototype);
6183    load_mode = ALLOW_RETURN_HOLE;
6184    graph()->MarkDependsOnEmptyArrayProtoElements();
6185  }
6186
6187  return load_mode;
6188}
6189
6190
// Builds an element access (load or store) for a receiver known to have a
// single map.  |dependency| is an optional instruction (e.g. an elements
// kind transition) that, when present, already pins the elements kind, so
// the map check's GVN dependency on it can be cleared.
HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    HValue* dependency,
    Handle<Map> map,
    bool is_store,
    KeyedAccessStoreMode store_mode) {
  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, top_info(),
                                               dependency);
  if (dependency) {
    checked_object->ClearGVNFlag(kDependsOnElementsKind);
  }

  if (is_store && map->prototype()->IsJSObject()) {
    // monomorphic stores need a prototype chain check because shape
    // changes could allow callbacks on elements in the chain that
    // aren't compatible with monomorphic keyed stores.
    Handle<JSObject> prototype(JSObject::cast(map->prototype()));
    // Walk to the end of the prototype chain to find the last JSObject
    // holder; the chain is expected to terminate in null.
    Object* holder = map->prototype();
    while (holder->GetPrototype(isolate())->IsJSObject()) {
      holder = holder->GetPrototype(isolate());
    }
    ASSERT(holder->GetPrototype(isolate())->IsNull());

    BuildCheckPrototypeMaps(prototype,
                            Handle<JSObject>(JSObject::cast(holder)));
  }

  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
  return BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      map->instance_type() == JS_ARRAY_TYPE,
      map->elements_kind(), is_store,
      load_mode, store_mode);
}
6227
6228
// Attempts to lower a polymorphic element load to a single access using the
// most general compatible elements kind.  Returns NULL when the maps cannot
// be consolidated (mixed JSArray/JSObject receivers, or incompatible
// elements kinds).
HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps) {
  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
  // double), always use the "worst case" code without a transition.  This is
  // much faster than transitioning the elements to the worst case, trading a
  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
  bool has_double_maps = false;
  bool has_smi_or_object_maps = false;
  bool has_js_array_access = false;
  bool has_non_js_array_access = false;
  bool has_seen_holey_elements = false;
  Handle<Map> most_general_consolidated_map;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    if (!map->IsJSObjectMap()) return NULL;
    // Don't allow mixing of JSArrays with JSObjects.
    if (map->instance_type() == JS_ARRAY_TYPE) {
      if (has_non_js_array_access) return NULL;
      has_js_array_access = true;
    } else if (has_js_array_access) {
      return NULL;
    } else {
      has_non_js_array_access = true;
    }
    // Don't allow mixed, incompatible elements kinds.
    if (map->has_fast_double_elements()) {
      if (has_smi_or_object_maps) return NULL;
      has_double_maps = true;
    } else if (map->has_fast_smi_or_object_elements()) {
      if (has_double_maps) return NULL;
      has_smi_or_object_maps = true;
    } else {
      return NULL;
    }
    // Remember if we've ever seen holey elements.
    if (IsHoleyElementsKind(map->elements_kind())) {
      has_seen_holey_elements = true;
    }
    // Remember the most general elements kind, the code for its load will
    // properly handle all of the more specific cases.
    if ((i == 0) || IsMoreGeneralElementsKindTransition(
            most_general_consolidated_map->elements_kind(),
            map->elements_kind())) {
      most_general_consolidated_map = map;
    }
  }
  // Bail out if no fast elements kind was seen at all.
  if (!has_double_maps && !has_smi_or_object_maps) return NULL;

  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
  ElementsKind consolidated_elements_kind = has_seen_holey_elements
      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
      : most_general_consolidated_map->elements_kind();
  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
      checked_object, key, val,
      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
      consolidated_elements_kind,
      false, NEVER_RETURN_HOLE, STANDARD_STORE);
  return instr;
}
6293
6294
// Builds an element access for a receiver with several possible maps.
// First tries a consolidated load, then per-map elements-kind transitions;
// if one map remains it is handled monomorphically, otherwise a chain of
// map-compare blocks is emitted.  For stores the return value is NULL;
// *has_side_effects tells the caller whether to add a Simulate.
HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
    HValue* object,
    HValue* key,
    HValue* val,
    SmallMapList* maps,
    bool is_store,
    KeyedAccessStoreMode store_mode,
    bool* has_side_effects) {
  *has_side_effects = false;
  BuildCheckHeapObject(object);

  if (!is_store) {
    HInstruction* consolidated_load =
        TryBuildConsolidatedElementLoad(object, key, val, maps);
    if (consolidated_load != NULL) {
      *has_side_effects |= consolidated_load->HasObservableSideEffects();
      return consolidated_load;
    }
  }

  // Elements_kind transition support.
  MapHandleList transition_target(maps->length());
  // Collect possible transition targets.
  MapHandleList possible_transitioned_maps(maps->length());
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ElementsKind elements_kind = map->elements_kind();
    if (IsFastElementsKind(elements_kind) &&
        elements_kind != GetInitialFastElementsKind()) {
      possible_transitioned_maps.Add(map);
    }
  }
  // Get transition target for each map (NULL == no transition).
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    Handle<Map> transitioned_map =
        map->FindTransitionedMap(&possible_transitioned_maps);
    transition_target.Add(transitioned_map);
  }

  MapHandleList untransitionable_maps(maps->length());
  HTransitionElementsKind* transition = NULL;
  for (int i = 0; i < maps->length(); ++i) {
    Handle<Map> map = maps->at(i);
    ASSERT(map->IsMap());
    if (!transition_target.at(i).is_null()) {
      ASSERT(Map::IsValidElementsTransition(
          map->elements_kind(),
          transition_target.at(i)->elements_kind()));
      transition = Add<HTransitionElementsKind>(object, map,
                                                transition_target.at(i));
    } else {
      untransitionable_maps.Add(map);
    }
  }

  // If only one map is left after transitioning, handle this case
  // monomorphically.
  ASSERT(untransitionable_maps.length() >= 1);
  if (untransitionable_maps.length() == 1) {
    Handle<Map> untransitionable_map = untransitionable_maps[0];
    HInstruction* instr = NULL;
    if (untransitionable_map->has_slow_elements_kind() ||
        !untransitionable_map->IsJSObjectMap()) {
      instr = AddInstruction(is_store ? BuildStoreKeyedGeneric(object, key, val)
                                      : BuildLoadKeyedGeneric(object, key));
    } else {
      instr = BuildMonomorphicElementAccess(
          object, key, val, transition, untransitionable_map, is_store,
          store_mode);
    }
    *has_side_effects |= instr->HasObservableSideEffects();
    return is_store ? NULL : instr;
  }

  // Polymorphic case: one map-compare block per remaining map, all joining
  // in a common successor.
  HBasicBlock* join = graph()->CreateBasicBlock();

  for (int i = 0; i < untransitionable_maps.length(); ++i) {
    Handle<Map> map = untransitionable_maps[i];
    if (!map->IsJSObjectMap()) continue;
    ElementsKind elements_kind = map->elements_kind();
    HBasicBlock* this_map = graph()->CreateBasicBlock();
    HBasicBlock* other_map = graph()->CreateBasicBlock();
    HCompareMap* mapcompare =
        New<HCompareMap>(object, map, this_map, other_map);
    FinishCurrentBlock(mapcompare);

    set_current_block(this_map);
    HInstruction* access = NULL;
    if (IsDictionaryElementsKind(elements_kind)) {
      access = is_store
          ? AddInstruction(BuildStoreKeyedGeneric(object, key, val))
          : AddInstruction(BuildLoadKeyedGeneric(object, key));
    } else {
      ASSERT(IsFastElementsKind(elements_kind) ||
             IsExternalArrayElementsKind(elements_kind));
      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
      // Happily, mapcompare is a checked object.
      access = BuildUncheckedMonomorphicElementAccess(
          mapcompare, key, val,
          map->instance_type() == JS_ARRAY_TYPE,
          elements_kind, is_store,
          load_mode,
          store_mode);
    }
    *has_side_effects |= access->HasObservableSideEffects();
    // The caller will use has_side_effects and add a correct Simulate.
    access->SetFlag(HValue::kHasNoObservableSideEffects);
    if (!is_store) {
      Push(access);
    }
    NoObservableSideEffectsScope scope(this);
    GotoNoSimulate(join);
    set_current_block(other_map);
  }

  // Deopt if none of the cases matched.
  NoObservableSideEffectsScope scope(this);
  FinishExitWithHardDeoptimization("Unknown map in polymorphic element access",
                                   join);
  set_current_block(join);
  return is_store ? NULL : Pop();
}
6418
6419
// Dispatches a keyed element access to the monomorphic, polymorphic or
// generic lowering based on the recorded receiver maps.  Sets
// *has_side_effects so the caller can add the appropriate Simulate; for
// stores the return value may be NULL.
HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
    HValue* obj,
    HValue* key,
    HValue* val,
    Expression* expr,
    bool is_store,
    bool* has_side_effects) {
  ASSERT(!expr->IsPropertyName());
  HInstruction* instr = NULL;

  SmallMapList* types;
  bool monomorphic = ComputeReceiverTypes(expr, obj, &types);

  bool force_generic = false;
  if (is_store && (monomorphic || (types != NULL && !types->is_empty()))) {
    // Stores can't be mono/polymorphic if their prototype chain has dictionary
    // elements. However a receiver map that has dictionary elements itself
    // should be left to normal mono/poly behavior (the other maps may benefit
    // from highly optimized stores).
    for (int i = 0; i < types->length(); i++) {
      Handle<Map> current_map = types->at(i);
      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
        force_generic = true;
        monomorphic = false;
        break;
      }
    }
  }

  if (monomorphic) {
    Handle<Map> map = types->first();
    if (map->has_slow_elements_kind()) {
      // Slow (dictionary) elements always go through the generic stub.
      instr = is_store ? BuildStoreKeyedGeneric(obj, key, val)
                       : BuildLoadKeyedGeneric(obj, key);
      AddInstruction(instr);
    } else {
      BuildCheckHeapObject(obj);
      instr = BuildMonomorphicElementAccess(
          obj, key, val, NULL, map, is_store, expr->GetStoreMode());
    }
  } else if (!force_generic && (types != NULL && !types->is_empty())) {
    return HandlePolymorphicElementAccess(
        obj, key, val, types, is_store,
        expr->GetStoreMode(), has_side_effects);
  } else {
    // Generic fallback; soft-deopt first when type feedback is missing so
    // unoptimized code can collect it.
    if (is_store) {
      if (expr->IsAssignment() &&
          expr->AsAssignment()->HasNoTypeInformation()) {
        Add<HDeoptimize>("Insufficient type feedback for keyed store",
                         Deoptimizer::SOFT);
      }
      instr = BuildStoreKeyedGeneric(obj, key, val);
    } else {
      if (expr->AsProperty()->HasNoTypeInformation()) {
        Add<HDeoptimize>("Insufficient type feedback for keyed load",
                         Deoptimizer::SOFT);
      }
      instr = BuildLoadKeyedGeneric(obj, key);
    }
    AddInstruction(instr);
  }
  *has_side_effects = instr->HasObservableSideEffects();
  return instr;
}
6484
6485
6486HInstruction* HOptimizedGraphBuilder::BuildStoreKeyedGeneric(
6487    HValue* object,
6488    HValue* key,
6489    HValue* value) {
6490  return New<HStoreKeyedGeneric>(
6491                         object,
6492                         key,
6493                         value,
6494                         function_strict_mode_flag());
6495}
6496
6497
6498void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
6499  // Outermost function already has arguments on the stack.
6500  if (function_state()->outer() == NULL) return;
6501
6502  if (function_state()->arguments_pushed()) return;
6503
6504  // Push arguments when entering inlined function.
6505  HEnterInlined* entry = function_state()->entry();
6506  entry->set_arguments_pushed();
6507
6508  HArgumentsObject* arguments = entry->arguments_object();
6509  const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
6510
6511  HInstruction* insert_after = entry;
6512  for (int i = 0; i < arguments_values->length(); i++) {
6513    HValue* argument = arguments_values->at(i);
6514    HInstruction* push_argument = New<HPushArgument>(argument);
6515    push_argument->InsertAfter(insert_after);
6516    insert_after = push_argument;
6517  }
6518
6519  HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
6520  arguments_elements->ClearFlag(HValue::kUseGVN);
6521  arguments_elements->InsertAfter(insert_after);
6522  function_state()->set_arguments_elements(arguments_elements);
6523}
6524
6525
// Tries to lower a property access on the arguments object directly
// (arguments.length or arguments[i]).  Returns false when |expr| is not
// such an access; on success the result has been given to the context.
bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL) return false;
  if (!proxy->var()->IsStackAllocated()) return false;
  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
    return false;
  }

  HInstruction* result = NULL;
  if (expr->key()->IsPropertyName()) {
    // Only arguments.length is supported for named access.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    if (!name->IsOneByteEqualTo(STATIC_ASCII_VECTOR("length"))) return false;

    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      result = New<HArgumentsLength>(elements);
    } else {
      // Inlined: the argument count is a compile-time constant.
      // Number of arguments without receiver.
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      result = New<HConstant>(argument_count);
    }
  } else {
    // Keyed access: evaluate the key, then emit a bounds-checked element
    // read from the arguments.
    Push(graph()->GetArgumentsObject());
    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
    HValue* key = Pop();
    Drop(1);  // Arguments object.
    if (function_state()->outer() == NULL) {
      HInstruction* elements = Add<HArgumentsElements>(false);
      HInstruction* length = Add<HArgumentsLength>(elements);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    } else {
      EnsureArgumentsArePushedForAccess();

      // Number of arguments without receiver.
      HInstruction* elements = function_state()->arguments_elements();
      int argument_count = environment()->
          arguments_environment()->parameter_count() - 1;
      HInstruction* length = Add<HConstant>(argument_count);
      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
      result = New<HAccessArgumentsAt>(elements, length, checked_key);
    }
  }
  ast_context()->ReturnInstruction(result, expr->id());
  return true;
}
6573
6574
// Re-pushes the receiver (and key, for keyed accesses) and performs the
// load for |expr| in a value context, so the loaded value ends up on the
// environment's expression stack.
void HOptimizedGraphBuilder::PushLoad(Property* expr,
                                      HValue* object,
                                      HValue* key) {
  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
  Push(object);
  if (key != NULL) Push(key);
  BuildLoad(expr, expr->LoadId());
}
6583
6584
6585static bool AreStringTypes(SmallMapList* types) {
6586  for (int i = 0; i < types->length(); i++) {
6587    if (types->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6588  }
6589  return true;
6590}
6591
6592
// Emits the load described by |expr|, consuming its operands from the
// environment stack (receiver, plus a key for keyed and string-index
// accesses), and returns the result through the current AST context.
void HOptimizedGraphBuilder::BuildLoad(Property* expr,
                                       BailoutId ast_id) {
  HInstruction* instr = NULL;
  if (expr->IsStringAccess()) {
    // String index access: load the character code and box it as a
    // one-character string.
    HValue* index = Pop();
    HValue* string = Pop();
    HInstruction* char_code = BuildStringCharCodeAt(string, index);
    AddInstruction(char_code);
    instr = NewUncasted<HStringCharFromCode>(char_code);

  } else if (expr->IsFunctionPrototype()) {
    HValue* function = Pop();
    BuildCheckHeapObject(function);
    instr = New<HLoadFunctionPrototype>(function);

  } else if (expr->key()->IsPropertyName()) {
    // Named property load.
    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
    HValue* object = Pop();

    SmallMapList* types;
    ComputeReceiverTypes(expr, object, &types);
    ASSERT(types != NULL);

    if (types->length() > 0) {
      PropertyAccessInfo info(isolate(), types->first(), name);
      if (!info.CanLoadAsMonomorphic(types)) {
        // The recorded maps do not agree on a single access; emit a
        // polymorphic load instead.
        return HandlePolymorphicLoadNamedField(
            ast_id, expr->LoadId(), object, types, name);
      }

      BuildCheckHeapObject(object);
      HInstruction* checked_object;
      // When all feedback maps are string maps a single instance-type
      // check suffices; otherwise check against the exact map set.
      if (AreStringTypes(types)) {
        checked_object =
            Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
      } else {
        checked_object = Add<HCheckMaps>(object, types);
      }
      instr = BuildLoadMonomorphic(
          &info, object, checked_object, ast_id, expr->LoadId());
      if (instr == NULL) return;
      if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
    } else {
      // No usable type feedback: fall back to a generic named load.
      instr = BuildLoadNamedGeneric(object, name, expr);
    }

  } else {
    // Keyed element access.
    HValue* key = Pop();
    HValue* obj = Pop();

    bool has_side_effects = false;
    HValue* load = HandleKeyedElementAccess(
        obj, key, NULL, expr,
        false,  // is_store
        &has_side_effects);
    if (has_side_effects) {
      if (ast_context()->IsEffect()) {
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
      } else {
        // Keep the loaded value live across the simulate.
        Push(load);
        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
        Drop(1);
      }
    }
    return ast_context()->ReturnValue(load);
  }
  return ast_context()->ReturnInstruction(instr, ast_id);
}
6661
6662
6663void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
6664  ASSERT(!HasStackOverflow());
6665  ASSERT(current_block() != NULL);
6666  ASSERT(current_block()->HasPredecessor());
6667
6668  if (TryArgumentsAccess(expr)) return;
6669
6670  CHECK_ALIVE(VisitForValue(expr->obj()));
6671  if ((!expr->IsFunctionPrototype() && !expr->key()->IsPropertyName()) ||
6672      expr->IsStringAccess()) {
6673    CHECK_ALIVE(VisitForValue(expr->key()));
6674  }
6675
6676  BuildLoad(expr, expr->id());
6677}
6678
6679
6680HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant,
6681                                                   CompilationInfo* info) {
6682  HConstant* constant_value = New<HConstant>(constant);
6683
6684  if (constant->map()->CanOmitMapChecks()) {
6685    constant->map()->AddDependentCompilationInfo(
6686        DependentCode::kPrototypeCheckGroup, info);
6687    return constant_value;
6688  }
6689
6690  AddInstruction(constant_value);
6691  HCheckMaps* check =
6692      Add<HCheckMaps>(constant_value, handle(constant->map()), info);
6693  check->ClearGVNFlag(kDependsOnElementsKind);
6694  return check;
6695}
6696
6697
6698HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
6699                                                     Handle<JSObject> holder) {
6700  while (!prototype.is_identical_to(holder)) {
6701    BuildConstantMapCheck(prototype, top_info());
6702    prototype = handle(JSObject::cast(prototype->GetPrototype()));
6703  }
6704
6705  HInstruction* checked_object = BuildConstantMapCheck(prototype, top_info());
6706  if (!checked_object->IsLinked()) AddInstruction(checked_object);
6707  return checked_object;
6708}
6709
6710
6711void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
6712                                                   Handle<Map> receiver_map) {
6713  if (!holder.is_null()) {
6714    Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
6715    BuildCheckPrototypeMaps(prototype, holder);
6716  }
6717}
6718
6719
// Guards a call to a known (constant) function found on |holder| by
// checking the receiver's map and every map on the prototype chain up to
// the holder.
void HOptimizedGraphBuilder::AddCheckConstantFunction(
    Handle<JSObject> holder,
    HValue* receiver,
    Handle<Map> receiver_map) {
  // Constant functions have the nice property that the map will change if they
  // are overwritten.  Therefore it is enough to check the map of the holder and
  // its prototypes.
  AddCheckMap(receiver, receiver_map);
  AddCheckPrototypeMaps(holder, receiver_map);
}
6730
6731
// Sort key for ranking polymorphic call targets: index into the type list
// plus the hotness/size metrics that operator< (below) orders by.
class FunctionSorter {
 public:
  // Default arguments subsume the former no-arg constructor; |explicit|
  // keeps a lone int from silently converting to a FunctionSorter.
  explicit FunctionSorter(int index = 0,
                          int ticks = 0,
                          int ast_length = 0,
                          int src_length = 0)
      : index_(index),
        ticks_(ticks),
        ast_length_(ast_length),
        src_length_(src_length) { }

  int index() const { return index_; }
  int ticks() const { return ticks_; }
  int ast_length() const { return ast_length_; }
  int src_length() const { return src_length_; }

 private:
  int index_;
  int ticks_;
  int ast_length_;
  int src_length_;
};
6752
6753
6754inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
6755  int diff = lhs.ticks() - rhs.ticks();
6756  if (diff != 0) return diff > 0;
6757  diff = lhs.ast_length() - rhs.ast_length();
6758  if (diff != 0) return diff < 0;
6759  return lhs.src_length() < rhs.src_length();
6760}
6761
6762
// Attempts to compile a polymorphic named call as if it were monomorphic:
// when every receiver map agrees on the same load and yields the same call
// target, a single combined map-set check plus one call suffices.  Returns
// false without emitting anything if the maps disagree or no target can be
// computed.
bool HOptimizedGraphBuilder::TryCallPolymorphicAsMonomorphic(
    Call* expr,
    HValue* receiver,
    SmallMapList* types,
    Handle<String> name) {
  if (types->length() > kMaxCallPolymorphism) return false;

  PropertyAccessInfo info(isolate(), types->at(0), name);
  if (!info.CanLoadAsMonomorphic(types)) return false;
  if (!expr->ComputeTarget(info.map(), name)) return false;

  // One map check against the whole set guards all receiver maps at once.
  BuildCheckHeapObject(receiver);
  Add<HCheckMaps>(receiver, types);
  AddCheckPrototypeMaps(expr->holder(), info.map());
  if (FLAG_trace_inlining) {
    Handle<JSFunction> caller = current_info()->closure();
    SmartArrayPointer<char> caller_name =
        caller->shared()->DebugName()->ToCString();
    PrintF("Trying to inline the polymorphic call to %s from %s\n",
           *name->ToCString(), *caller_name);
  }

  if (!TryInlineCall(expr)) {
    // Inlining failed or was rejected; emit a direct call to the known
    // target instead.
    int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
    HCallConstantFunction* call =
      New<HCallConstantFunction>(expr->target(), argument_count);
    PreProcessCall(call);
    AddInstruction(call);
    if (!ast_context()->IsEffect()) Push(call);
    Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
  }

  return true;
}
6798
6799
// Compiles a polymorphic named call by dispatching on the receiver's map:
// builds a chain of compare blocks (map compares, plus smi/string special
// cases), one call block per known target, and a join block collecting the
// results.  Receivers whose map was not recorded fall through to either a
// hard deoptimization or a generic call.
void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(
    Call* expr,
    HValue* receiver,
    SmallMapList* types,
    Handle<String> name) {
  if (TryCallPolymorphicAsMonomorphic(expr, receiver, types, name)) return;

  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
  HBasicBlock* join = NULL;
  FunctionSorter order[kMaxCallPolymorphism];
  int ordered_functions = 0;

  // Marker maps stand in for primitive receivers in |types|: a string
  // (resp. number) receiver is represented by the map of the String
  // (resp. Number) function's prototype.
  Handle<Map> initial_string_map(
      isolate()->native_context()->string_function()->initial_map());
  Handle<Map> string_marker_map(
      JSObject::cast(initial_string_map->prototype())->map());
  Handle<Map> initial_number_map(
      isolate()->native_context()->number_function()->initial_map());
  Handle<Map> number_marker_map(
      JSObject::cast(initial_number_map->prototype())->map());
  Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();

  bool handle_smi = false;

  // Collect the maps that have a computable call target.
  for (int i = 0;
       i < types->length() && ordered_functions < kMaxCallPolymorphism;
       ++i) {
    Handle<Map> map = types->at(i);
    if (expr->ComputeTarget(map, name)) {
      if (map.is_identical_to(number_marker_map)) handle_smi = true;
      order[ordered_functions++] =
          FunctionSorter(i,
                         expr->target()->shared()->profiler_ticks(),
                         InliningAstSize(expr->target()),
                         expr->target()->shared()->SourceSize());
    }
  }

  // Handle the hottest (most-ticked) targets first.
  std::sort(order, order + ordered_functions);

  HBasicBlock* number_block = NULL;

  for (int fn = 0; fn < ordered_functions; ++fn) {
    int i = order[fn].index();
    Handle<Map> map = types->at(i);
    if (fn == 0) {
      // Only needed once.
      join = graph()->CreateBasicBlock();
      if (handle_smi) {
        // Smis go straight to the shared number handler; everything else
        // continues into the map-compare chain.
        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
        number_block = graph()->CreateBasicBlock();
        FinishCurrentBlock(New<HIsSmiAndBranch>(
                receiver, empty_smi_block, not_smi_block));
        Goto(empty_smi_block, number_block);
        set_current_block(not_smi_block);
      } else {
        BuildCheckHeapObject(receiver);
      }
    }
    HBasicBlock* if_true = graph()->CreateBasicBlock();
    HBasicBlock* if_false = graph()->CreateBasicBlock();
    HUnaryControlInstruction* compare;

    if (handle_smi && map.is_identical_to(number_marker_map)) {
      // Smi and heap-number receivers share one handler block.
      compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
      map = initial_number_map;
      expr->set_number_check(
          Handle<JSObject>(JSObject::cast(map->prototype())));
    } else if (map.is_identical_to(string_marker_map)) {
      compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
      map = initial_string_map;
      expr->set_string_check(
          Handle<JSObject>(JSObject::cast(map->prototype())));
    } else {
      compare = New<HCompareMap>(receiver, map, if_true, if_false);
      expr->set_map_check();
    }

    FinishCurrentBlock(compare);

    if (expr->check_type() == NUMBER_CHECK) {
      Goto(if_true, number_block);
      if_true = number_block;
      number_block->SetJoinId(expr->id());
    }
    set_current_block(if_true);

    expr->ComputeTarget(map, name);
    AddCheckPrototypeMaps(expr->holder(), map);
    if (FLAG_trace_inlining && FLAG_polymorphic_inlining) {
      Handle<JSFunction> caller = current_info()->closure();
      SmartArrayPointer<char> caller_name =
          caller->shared()->DebugName()->ToCString();
      PrintF("Trying to inline the polymorphic call to %s from %s\n",
             *name->ToCString(),
             *caller_name);
    }
    if (FLAG_polymorphic_inlining && TryInlineCall(expr)) {
      // Trying to inline will signal that we should bailout from the
      // entire compilation by setting stack overflow on the visitor.
      if (HasStackOverflow()) return;
    } else {
      // Inlining not attempted or failed: emit a direct call.
      HCallConstantFunction* call =
          New<HCallConstantFunction>(expr->target(), argument_count);
      PreProcessCall(call);
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
    }

    if (current_block() != NULL) Goto(join);
    set_current_block(if_false);
  }

  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
  // know about and do not want to handle ones we've never seen.  Otherwise
  // use a generic IC.
  if (ordered_functions == types->length() && FLAG_deoptimize_uncommon_cases) {
    // Because the deopt may be the only path in the polymorphic call, make sure
    // that the environment stack matches the depth on deopt that it otherwise
    // would have had after a successful call.
    Drop(argument_count);
    if (!ast_context()->IsEffect()) Push(graph()->GetConstant0());
    FinishExitWithHardDeoptimization("Unknown map in polymorphic call", join);
  } else {
    HCallNamed* call = New<HCallNamed>(name, argument_count);
    PreProcessCall(call);

    if (join != NULL) {
      AddInstruction(call);
      if (!ast_context()->IsEffect()) Push(call);
      Goto(join);
    } else {
      // No compare chain was built at all; the generic call is the result.
      return ast_context()->ReturnInstruction(call, expr->id());
    }
  }

  // We assume that control flow is always live after an expression.  So
  // even without predecessors to the join block, we set it as the exit
  // block and continue by adding instructions there.
  ASSERT(join != NULL);
  if (join->HasPredecessor()) {
    set_current_block(join);
    join->SetJoinId(expr->id());
    if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
  } else {
    set_current_block(NULL);
  }
}
6949
6950
6951void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
6952                                         Handle<JSFunction> caller,
6953                                         const char* reason) {
6954  if (FLAG_trace_inlining) {
6955    SmartArrayPointer<char> target_name =
6956        target->shared()->DebugName()->ToCString();
6957    SmartArrayPointer<char> caller_name =
6958        caller->shared()->DebugName()->ToCString();
6959    if (reason == NULL) {
6960      PrintF("Inlined %s called from %s.\n", *target_name, *caller_name);
6961    } else {
6962      PrintF("Did not inline %s called from %s (%s).\n",
6963             *target_name, *caller_name, reason);
6964    }
6965  }
6966}
6967
6968
// Sentinel returned by InliningAstSize for targets that must not be inlined.
static const int kNotInlinable = 1000000000;
6970
6971
6972int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
6973  if (!FLAG_use_inlining) return kNotInlinable;
6974
6975  // Precondition: call is monomorphic and we have found a target with the
6976  // appropriate arity.
6977  Handle<JSFunction> caller = current_info()->closure();
6978  Handle<SharedFunctionInfo> target_shared(target->shared());
6979
6980  // Always inline builtins marked for inlining.
6981  if (target->IsBuiltin()) {
6982    return target_shared->inline_builtin() ? 0 : kNotInlinable;
6983  }
6984
6985  // Do a quick check on source code length to avoid parsing large
6986  // inlining candidates.
6987  if (target_shared->SourceSize() >
6988      Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
6989    TraceInline(target, caller, "target text too big");
6990    return kNotInlinable;
6991  }
6992
6993  // Target must be inlineable.
6994  if (!target_shared->IsInlineable()) {
6995    TraceInline(target, caller, "target not inlineable");
6996    return kNotInlinable;
6997  }
6998  if (target_shared->dont_inline() || target_shared->dont_optimize()) {
6999    TraceInline(target, caller, "target contains unsupported syntax [early]");
7000    return kNotInlinable;
7001  }
7002
7003  int nodes_added = target_shared->ast_node_count();
7004  return nodes_added;
7005}
7006
7007
// Attempts to inline |target| at the current call site.  Returns false when
// inlining is rejected early (size limits, depth limit, recursion, parse
// failure, unsupported syntax); once graph construction for the inline body
// begins it always returns true, signalling failure, if any, by setting
// stack overflow on the visitor.  |implicit_return_value| is the value
// produced when the body falls off the end of a construct or setter call.
bool HOptimizedGraphBuilder::TryInline(CallKind call_kind,
                                       Handle<JSFunction> target,
                                       int arguments_count,
                                       HValue* implicit_return_value,
                                       BailoutId ast_id,
                                       BailoutId return_id,
                                       InliningKind inlining_kind) {
  int nodes_added = InliningAstSize(target);
  if (nodes_added == kNotInlinable) return false;

  Handle<JSFunction> caller = current_info()->closure();

  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [early]");
    return false;
  }

  // Don't inline deeper than the maximum number of inlining levels.
  HEnvironment* env = environment();
  int current_level = 1;
  while (env->outer() != NULL) {
    if (current_level == FLAG_max_inlining_levels) {
      TraceInline(target, caller, "inline depth limit reached");
      return false;
    }
    // Only JS function frames count toward the inlining depth.
    if (env->outer()->frame_type() == JS_FUNCTION) {
      current_level++;
    }
    env = env->outer();
  }

  // Don't inline recursive functions.
  for (FunctionState* state = function_state();
       state != NULL;
       state = state->outer()) {
    if (*state->compilation_info()->closure() == *target) {
      TraceInline(target, caller, "target is recursive");
      return false;
    }
  }

  // We don't want to add more than a certain number of nodes from inlining.
  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
                           kUnlimitedMaxInlinedNodesCumulative)) {
    TraceInline(target, caller, "cumulative AST node limit reached");
    return false;
  }

  // Parse and allocate variables.
  CompilationInfo target_info(target, zone());
  Handle<SharedFunctionInfo> target_shared(target->shared());
  if (!Parser::Parse(&target_info) || !Scope::Analyze(&target_info)) {
    if (target_info.isolate()->has_pending_exception()) {
      // Parse or scope error, never optimize this function.
      SetStackOverflow();
      target_shared->DisableOptimization(kParseScopeError);
    }
    TraceInline(target, caller, "parse failure");
    return false;
  }

  if (target_info.scope()->num_heap_slots() > 0) {
    TraceInline(target, caller, "target has context-allocated variables");
    return false;
  }
  FunctionLiteral* function = target_info.function();

  // The following conditions must be checked again after re-parsing, because
  // earlier the information might not have been complete due to lazy parsing.
  nodes_added = function->ast_node_count();
  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
    TraceInline(target, caller, "target AST is too large [late]");
    return false;
  }
  AstProperties::Flags* flags(function->flags());
  if (flags->Contains(kDontInline) || function->dont_optimize()) {
    TraceInline(target, caller, "target contains unsupported syntax [late]");
    return false;
  }

  // If the function uses the arguments object check that inlining of functions
  // with arguments object is enabled and the arguments-variable is
  // stack allocated.
  if (function->scope()->arguments() != NULL) {
    if (!FLAG_inline_arguments) {
      TraceInline(target, caller, "target uses arguments object");
      return false;
    }

    if (!function->scope()->arguments()->IsStackAllocated()) {
      TraceInline(target,
                  caller,
                  "target uses non-stackallocated arguments object");
      return false;
    }
  }

  // All declarations must be inlineable.
  ZoneList<Declaration*>* decls = target_info.scope()->declarations();
  int decl_count = decls->length();
  for (int i = 0; i < decl_count; ++i) {
    if (!decls->at(i)->IsInlineable()) {
      TraceInline(target, caller, "target has non-trivial declaration");
      return false;
    }
  }

  // Generate the deoptimization data for the unoptimized version of
  // the target function if we don't already have it.
  if (!target_shared->has_deoptimization_support()) {
    // Note that we compile here using the same AST that we will use for
    // generating the optimized inline code.
    target_info.EnableDeoptimizationSupport();
    if (!FullCodeGenerator::MakeCode(&target_info)) {
      TraceInline(target, caller, "could not generate deoptimization info");
      return false;
    }
    if (target_shared->scope_info() == ScopeInfo::Empty(isolate())) {
      // The scope info might not have been set if a lazily compiled
      // function is inlined before being called for the first time.
      Handle<ScopeInfo> target_scope_info =
          ScopeInfo::Create(target_info.scope(), zone());
      target_shared->set_scope_info(*target_scope_info);
    }
    target_shared->EnableDeoptimizationSupport(*target_info.code());
    Compiler::RecordFunctionCompilation(Logger::FUNCTION_TAG,
                                        &target_info,
                                        target_shared);
  }

  // ----------------------------------------------------------------
  // After this point, we've made a decision to inline this function (so
  // TryInline should always return true).

  // Type-check the inlined function.
  ASSERT(target_shared->has_deoptimization_support());
  AstTyper::Run(&target_info);

  // Save the pending call context. Set up new one for the inlined function.
  // The function state is new-allocated because we need to delete it
  // in two different places.
  FunctionState* target_state = new FunctionState(
      this, &target_info, inlining_kind);

  HConstant* undefined = graph()->GetConstantUndefined();
  bool undefined_receiver = HEnvironment::UseUndefinedReceiver(
      target, function, call_kind, inlining_kind);
  HEnvironment* inner_env =
      environment()->CopyForInlining(target,
                                     arguments_count,
                                     function,
                                     undefined,
                                     function_state()->inlining_kind(),
                                     undefined_receiver);

  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
  inner_env->BindContext(context);

  Add<HSimulate>(return_id);
  current_block()->UpdateEnvironment(inner_env);
  HArgumentsObject* arguments_object = NULL;

  // If the function uses arguments object create and bind one, also copy
  // current arguments values to use them for materialization.
  if (function->scope()->arguments() != NULL) {
    ASSERT(function->scope()->arguments()->IsStackAllocated());
    HEnvironment* arguments_env = inner_env->arguments_environment();
    int arguments_count = arguments_env->parameter_count();
    arguments_object = Add<HArgumentsObject>(arguments_count);
    inner_env->Bind(function->scope()->arguments(), arguments_object);
    for (int i = 0; i < arguments_count; i++) {
      arguments_object->AddArgument(arguments_env->Lookup(i), zone());
    }
  }

  HEnterInlined* enter_inlined =
      Add<HEnterInlined>(target, arguments_count, function,
                         function_state()->inlining_kind(),
                         function->scope()->arguments(),
                         arguments_object, undefined_receiver);
  function_state()->set_entry(enter_inlined);

  // Build the graph for the inlined body.
  VisitDeclarations(target_info.scope()->declarations());
  VisitStatements(function->body());
  if (HasStackOverflow()) {
    // Bail out if the inline function did, as we cannot residualize a call
    // instead.
    TraceInline(target, caller, "inline graph construction failed");
    target_shared->DisableOptimization(kInliningBailedOut);
    inline_bailout_ = true;
    delete target_state;
    return true;
  }

  // Update inlined nodes count.
  inlined_count_ += nodes_added;

  Handle<Code> unoptimized_code(target_shared->code());
  ASSERT(unoptimized_code->kind() == Code::FUNCTION);
  Handle<TypeFeedbackInfo> type_info(
      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
  graph()->update_type_change_checksum(type_info->own_type_change_checksum());

  TraceInline(target, caller, NULL);

  if (current_block() != NULL) {
    FunctionState* state = function_state();
    if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
      // Falling off the end of an inlined construct call. In a test context the
      // return value will always evaluate to true, in a value context the
      // return value is the newly allocated receiver.
      if (call_context()->IsTest()) {
        Goto(inlined_test_context()->if_true(), state);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
      // Falling off the end of an inlined setter call. The returned value is
      // never used, the value of an assignment is always the value of the RHS
      // of the assignment.
      if (call_context()->IsTest()) {
        inlined_test_context()->ReturnValue(implicit_return_value);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        AddLeaveInlined(implicit_return_value, state);
      }
    } else {
      // Falling off the end of a normal inlined function. This basically means
      // returning undefined.
      if (call_context()->IsTest()) {
        Goto(inlined_test_context()->if_false(), state);
      } else if (call_context()->IsEffect()) {
        Goto(function_return(), state);
      } else {
        ASSERT(call_context()->IsValue());
        AddLeaveInlined(undefined, state);
      }
    }
  }

  // Fix up the function exits.
  if (inlined_test_context() != NULL) {
    HBasicBlock* if_true = inlined_test_context()->if_true();
    HBasicBlock* if_false = inlined_test_context()->if_false();

    HEnterInlined* entry = function_state()->entry();

    // Pop the return test context from the expression context stack.
    ASSERT(ast_context() == inlined_test_context());
    ClearInlinedTestContext();
    delete target_state;

    // Forward to the real test context.
    if (if_true->HasPredecessor()) {
      entry->RegisterReturnTarget(if_true, zone());
      if_true->SetJoinId(ast_id);
      HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
      Goto(if_true, true_target, function_state());
    }
    if (if_false->HasPredecessor()) {
      entry->RegisterReturnTarget(if_false, zone());
      if_false->SetJoinId(ast_id);
      HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
      Goto(if_false, false_target, function_state());
    }
    set_current_block(NULL);
    return true;

  } else if (function_return()->HasPredecessor()) {
    function_state()->entry()->RegisterReturnTarget(function_return(), zone());
    function_return()->SetJoinId(ast_id);
    set_current_block(function_return());
  } else {
    set_current_block(NULL);
  }
  delete target_state;
  return true;
}
7291
7292
7293bool HOptimizedGraphBuilder::TryInlineCall(Call* expr, bool drop_extra) {
7294  // The function call we are inlining is a method call if the call
7295  // is a property call.
7296  CallKind call_kind = (expr->expression()->AsProperty() == NULL)
7297      ? CALL_AS_FUNCTION
7298      : CALL_AS_METHOD;
7299
7300  return TryInline(call_kind,
7301                   expr->target(),
7302                   expr->arguments()->length(),
7303                   NULL,
7304                   expr->id(),
7305                   expr->ReturnId(),
7306                   drop_extra ? DROP_EXTRA_ON_RETURN : NORMAL_RETURN);
7307}
7308
7309
7310bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
7311                                                HValue* implicit_return_value) {
7312  return TryInline(CALL_AS_FUNCTION,
7313                   expr->target(),
7314                   expr->arguments()->length(),
7315                   implicit_return_value,
7316                   expr->id(),
7317                   expr->ReturnId(),
7318                   CONSTRUCT_CALL_RETURN);
7319}
7320
7321
7322bool HOptimizedGraphBuilder::TryInlineGetter(Handle<JSFunction> getter,
7323                                             BailoutId ast_id,
7324                                             BailoutId return_id) {
7325  return TryInline(CALL_AS_METHOD,
7326                   getter,
7327                   0,
7328                   NULL,
7329                   ast_id,
7330                   return_id,
7331                   GETTER_CALL_RETURN);
7332}
7333
7334
7335bool HOptimizedGraphBuilder::TryInlineSetter(Handle<JSFunction> setter,
7336                                             BailoutId id,
7337                                             BailoutId assignment_id,
7338                                             HValue* implicit_return_value) {
7339  return TryInline(CALL_AS_METHOD,
7340                   setter,
7341                   1,
7342                   implicit_return_value,
7343                   id, assignment_id,
7344                   SETTER_CALL_RETURN);
7345}
7346
7347
7348bool HOptimizedGraphBuilder::TryInlineApply(Handle<JSFunction> function,
7349                                            Call* expr,
7350                                            int arguments_count) {
7351  return TryInline(CALL_AS_METHOD,
7352                   function,
7353                   arguments_count,
7354                   NULL,
7355                   expr->id(),
7356                   expr->ReturnId(),
7357                   NORMAL_RETURN);
7358}
7359
7360
// Tries to replace a call to a recognized builtin (Math.* here) with a
// direct Hydrogen operation.  Expects the arguments and receiver on the
// environment stack; |drop_extra| additionally drops the function slot.
// Returns false, leaving the stack untouched, when the builtin (or its
// arity) is not handled.
bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr,
                                                          bool drop_extra) {
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  switch (id) {
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    case kMathRound:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
      // Unary math builtins become a single HUnaryMathOperation.
      if (expr->arguments()->length() == 1) {
        HValue* argument = Pop();
        Drop(1);  // Receiver.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        if (drop_extra) Drop(1);  // Optionally drop the function.
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathImul:
      // Math.imul becomes an integer multiply.
      if (expr->arguments()->length() == 2) {
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(1);  // Receiver.
        HInstruction* op = HMul::NewImul(zone(), context(), left, right);
        if (drop_extra) Drop(1);  // Optionally drop the function.
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    default:
      // Not supported for inlining yet.
      break;
  }
  return false;
}
7400
7401
// Tries to replace a monomorphic method call to a builtin function
// (String charCodeAt/charAt/fromCharCode and Math.* operations) with
// equivalent Hydrogen instructions.  On success the result has been
// handed to the AST context and true is returned; on failure false is
// returned and the caller emits a generic call instead.  Note that the
// argument checks in every case happen before any values are popped,
// so the environment is untouched on the false path.
bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
    Call* expr,
    HValue* receiver,
    Handle<Map> receiver_map,
    CheckType check_type) {
  // A map-based receiver check requires an actual receiver map.
  ASSERT(check_type != RECEIVER_MAP_CHECK || !receiver_map.is_null());
  // Try to inline calls like Math.* as operations in the calling function.
  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  switch (id) {
    case kStringCharCodeAt:
    case kStringCharAt:
      // charCodeAt/charAt with a single index argument on a primitive
      // string receiver.
      if (argument_count == 2 && check_type == STRING_CHECK) {
        HValue* index = Pop();
        HValue* string = Pop();
        ASSERT(!expr->holder().is_null());
        // Guard the prototype chain from the string wrapper prototype up
        // to the holder of the builtin method.
        BuildCheckPrototypeMaps(Call::GetPrototypeForPrimitiveCheck(
                STRING_CHECK, expr->holder()->GetIsolate()),
            expr->holder());
        HInstruction* char_code =
            BuildStringCharCodeAt(string, index);
        if (id == kStringCharCodeAt) {
          ast_context()->ReturnInstruction(char_code, expr->id());
          return true;
        }
        // For charAt, additionally convert the char code to a string.
        AddInstruction(char_code);
        HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kStringFromCharCode:
      // String.fromCharCode with exactly one argument.
      if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* argument = Pop();
        Drop(1);  // Receiver.
        HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathExp:
      if (!FLAG_fast_math) break;
      // Fall through if FLAG_fast_math.
    case kMathRound:
    case kMathFloor:
    case kMathAbs:
    case kMathSqrt:
    case kMathLog:
      // One-argument Math operations map to HUnaryMathOperation.
      if (argument_count == 2 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* argument = Pop();
        Drop(1);  // Receiver.
        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
        ast_context()->ReturnInstruction(op, expr->id());
        return true;
      }
      break;
    case kMathPow:
      if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* right = Pop();
        HValue* left = Pop();
        Pop();  // Pop receiver.
        HInstruction* result = NULL;
        // Use sqrt() if exponent is 0.5 or -0.5.
        if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
          double exponent = HConstant::cast(right)->DoubleValue();
          if (exponent == 0.5) {
            result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
          } else if (exponent == -0.5) {
            // pow(x, -0.5) is compiled as 1 / pow(x, 0.5).
            HValue* one = graph()->GetConstant1();
            HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
                left, kMathPowHalf);
            // MathPowHalf doesn't have side effects so there's no need for
            // an environment simulation here.
            ASSERT(!sqrt->HasObservableSideEffects());
            result = NewUncasted<HDiv>(one, sqrt);
          } else if (exponent == 2.0) {
            // pow(x, 2) is compiled as x * x.
            result = NewUncasted<HMul>(left, left);
          }
        }

        // Fall back to a full power instruction for all other exponents.
        if (result == NULL) {
          result = NewUncasted<HPower>(left, right);
        }
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathMax:
    case kMathMin:
      // Two-argument min/max maps to a single HMathMinMax instruction.
      if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(1);  // Receiver.
        HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
                                                     : HMathMinMax::kMathMax;
        HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    case kMathImul:
      if (argument_count == 3 && check_type == RECEIVER_MAP_CHECK) {
        AddCheckConstantFunction(expr->holder(), receiver, receiver_map);
        HValue* right = Pop();
        HValue* left = Pop();
        Drop(1);  // Receiver.
        HInstruction* result = HMul::NewImul(zone(), context(), left, right);
        ast_context()->ReturnInstruction(result, expr->id());
        return true;
      }
      break;
    default:
      // Not yet supported for inlining.
      break;
  }
  return false;
}
7524
7525
// Recognizes and compiles the pattern f.apply(receiver, arguments),
// where |arguments| is the enclosing function's arguments object, so
// that the arguments array never needs to be materialized.  Returns
// true if the call was handled; false leaves the call for the generic
// path in VisitCall.
bool HOptimizedGraphBuilder::TryCallApply(Call* expr) {
  Expression* callee = expr->expression();
  Property* prop = callee->AsProperty();
  ASSERT(prop != NULL);

  // Only monomorphic calls whose receiver is a function map and whose
  // target is the builtin Function.prototype.apply qualify.
  if (!expr->IsMonomorphic() || expr->check_type() != RECEIVER_MAP_CHECK) {
    return false;
  }
  Handle<Map> function_map = expr->GetReceiverTypes()->first();
  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
      !expr->target()->shared()->HasBuiltinFunctionId() ||
      expr->target()->shared()->builtin_function_id() != kFunctionApply) {
    return false;
  }

  // The enclosing function must have an arguments object at all.
  if (current_info()->scope()->arguments() == NULL) return false;

  ZoneList<Expression*>* args = expr->arguments();
  if (args->length() != 2) return false;

  // The second argument must be the (stack-allocated) arguments object
  // itself, not just any value.
  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
  HValue* arg_two_value = LookupAndMakeLive(arg_two->var());
  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;

  // Found pattern f.apply(receiver, arguments).
  CHECK_ALIVE_OR_RETURN(VisitForValue(prop->obj()), true);
  HValue* function = Top();

  AddCheckConstantFunction(expr->holder(), function, function_map);
  Drop(1);

  CHECK_ALIVE_OR_RETURN(VisitForValue(args->at(0)), true);
  HValue* receiver = Pop();

  if (function_state()->outer() == NULL) {
    // Not inlined: forward the actual runtime arguments via a dedicated
    // apply-arguments instruction.
    HInstruction* elements = Add<HArgumentsElements>(false);
    HInstruction* length = Add<HArgumentsLength>(elements);
    HValue* wrapped_receiver = BuildWrapReceiver(receiver, function);
    HInstruction* result = New<HApplyArguments>(function,
                                                wrapped_receiver,
                                                length,
                                                elements);
    ast_context()->ReturnInstruction(result, expr->id());
    return true;
  } else {
    // We are inside inlined function and we know exactly what is inside
    // arguments object. But we need to be able to materialize at deopt.
    ASSERT_EQ(environment()->arguments_environment()->parameter_count(),
              function_state()->entry()->arguments_object()->arguments_count());
    HArgumentsObject* args = function_state()->entry()->arguments_object();
    const ZoneList<HValue*>* arguments_values = args->arguments_values();
    int arguments_count = arguments_values->length();
    // Push the wrapped receiver and the known argument values; these
    // either feed the inlined target or become explicit call arguments.
    Push(BuildWrapReceiver(receiver, function));
    for (int i = 1; i < arguments_count; i++) {
      Push(arguments_values->at(i));
    }

    // If the callee is a known constant function, try to inline it.
    Handle<JSFunction> known_function;
    if (function->IsConstant() &&
        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
      known_function = Handle<JSFunction>::cast(
          HConstant::cast(function)->handle(isolate()));
      int args_count = arguments_count - 1;  // Excluding receiver.
      if (TryInlineApply(known_function, expr, args_count)) return true;
    }

    // Inlining failed: re-push the values as actual call arguments.
    Drop(arguments_count - 1);
    Push(Add<HPushArgument>(Pop()));
    for (int i = 1; i < arguments_count; i++) {
      Push(Add<HPushArgument>(arguments_values->at(i)));
    }

    HInvokeFunction* call = New<HInvokeFunction>(function,
                                                 known_function,
                                                 arguments_count);
    Drop(arguments_count);
    ast_context()->ReturnInstruction(call, expr->id());
    return true;
  }
}
7607
7608
// Compiles a function call expression.  Dispatches on the syntactic
// shape of the callee: keyed property call (receiver[key]()), named
// property call (receiver.name()), global function call, other
// monomorphic calls, and finally the fully generic case.  The result
// instruction is handed to the AST context.
void HOptimizedGraphBuilder::VisitCall(Call* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  Expression* callee = expr->expression();
  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
  HInstruction* call = NULL;

  Property* prop = callee->AsProperty();
  if (prop != NULL) {
    if (!prop->key()->IsPropertyName()) {
      // Keyed function call.
      CHECK_ALIVE(VisitForValue(prop->obj()));
      CHECK_ALIVE(VisitForValue(prop->key()));

      // Push receiver and key like the non-optimized code generator expects it.
      HValue* key = Pop();
      HValue* receiver = Pop();
      Push(key);
      Push(Add<HPushArgument>(receiver));
      CHECK_ALIVE(VisitArgumentList(expr->arguments()));

      if (expr->IsMonomorphic()) {
        // Monomorphic keyed call: load the function through a
        // monomorphic element access on the initial JSArray map.
        BuildCheckHeapObject(receiver);
        ElementsKind kind = expr->KeyedArrayCallIsHoley()
            ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;

        Handle<Map> map(isolate()->get_initial_js_array_map(kind));

        HValue* function = BuildMonomorphicElementAccess(
            receiver, key, NULL, NULL, map, false, STANDARD_STORE);

        call = New<HCallFunction>(function, argument_count);
      } else {
        call = New<HCallKeyed>(key, argument_count);
      }
      Drop(argument_count + 1);  // 1 is the key.
      return ast_context()->ReturnInstruction(call, expr->id());
    }

    // Named function call.
    if (TryCallApply(expr)) return;

    CHECK_ALIVE(VisitForValue(prop->obj()));
    CHECK_ALIVE(VisitExpressions(expr->arguments()));

    Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
    // The receiver sits below the arguments on the expression stack.
    HValue* receiver =
        environment()->ExpressionStackAt(expr->arguments()->length());

    // Determine the receiver map(s) from type feedback; a call that only
    // became monomorphic here still needs its target computed.
    SmallMapList* types;
    bool was_monomorphic = expr->IsMonomorphic();
    bool monomorphic = ComputeReceiverTypes(expr, receiver, &types);
    if (!was_monomorphic && monomorphic) {
      monomorphic = expr->ComputeTarget(types->first(), name);
    }

    if (monomorphic) {
      Handle<Map> map = types->first();
      if (TryInlineBuiltinMethodCall(expr, receiver, map, expr->check_type())) {
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }

      if (CallStubCompiler::HasCustomCallGenerator(expr->target()) ||
          expr->check_type() != RECEIVER_MAP_CHECK) {
        // When the target has a custom call IC generator, use the IC,
        // because it is likely to generate better code.  Also use the IC
        // when a primitive receiver check is required.
        call = PreProcessCall(New<HCallNamed>(name, argument_count));
      } else {
        AddCheckConstantFunction(expr->holder(), receiver, map);

        if (TryInlineCall(expr)) return;
        call = PreProcessCall(
            New<HCallConstantFunction>(expr->target(), argument_count));
      }
    } else if (types != NULL && types->length() > 1) {
      // Polymorphic named call: dispatch on the observed receiver maps.
      ASSERT(expr->check_type() == RECEIVER_MAP_CHECK);
      HandlePolymorphicCallNamed(expr, receiver, types, name);
      return;

    } else {
      // No useful type feedback: fall back to a named call IC.
      call = PreProcessCall(New<HCallNamed>(name, argument_count));
    }
  } else {
    VariableProxy* proxy = expr->expression()->AsVariableProxy();
    if (proxy != NULL && proxy->var()->is_possibly_eval(isolate())) {
      // Direct eval calls cannot be optimized here.
      return Bailout(kPossibleDirectCallToEval);
    }

    bool global_call = proxy != NULL && proxy->var()->IsUnallocated();
    if (global_call) {
      Variable* var = proxy->var();
      bool known_global_function = false;
      // If there is a global property cell for the name at compile time and
      // access check is not enabled we assume that the function will not change
      // and generate optimized code for calling the function.
      LookupResult lookup(isolate());
      GlobalPropertyAccess type = LookupGlobalProperty(var, &lookup, false);
      if (type == kUseCell &&
          !current_info()->global_object()->IsAccessCheckNeeded()) {
        Handle<GlobalObject> global(current_info()->global_object());
        known_global_function = expr->ComputeGlobalTarget(global, &lookup);
      }
      if (known_global_function) {
        // Push the global object instead of the global receiver because
        // code generated by the full code generator expects it.
        HGlobalObject* global_object = Add<HGlobalObject>();
        Push(global_object);
        CHECK_ALIVE(VisitExpressions(expr->arguments()));

        CHECK_ALIVE(VisitForValue(expr->expression()));
        HValue* function = Pop();
        // Deoptimize if the function cell no longer holds the expected
        // function.
        Add<HCheckValue>(function, expr->target());

        // Replace the global object with the global receiver.
        HGlobalReceiver* global_receiver = Add<HGlobalReceiver>(global_object);
        // Index of the receiver from the top of the expression stack.
        const int receiver_index = argument_count - 1;
        ASSERT(environment()->ExpressionStackAt(receiver_index)->
               IsGlobalObject());
        environment()->SetExpressionStackAt(receiver_index, global_receiver);

        if (TryInlineBuiltinFunctionCall(expr, false)) {  // Nothing to drop.
          if (FLAG_trace_inlining) {
            PrintF("Inlining builtin ");
            expr->target()->ShortPrint();
            PrintF("\n");
          }
          return;
        }
        if (TryInlineCall(expr)) return;

        // Mark self-calls so recursion-related optimizations can apply.
        if (expr->target().is_identical_to(current_info()->closure())) {
          graph()->MarkRecursive();
        }

        if (CallStubCompiler::HasCustomCallGenerator(expr->target())) {
          // When the target has a custom call IC generator, use the IC,
          // because it is likely to generate better code.
          call = PreProcessCall(New<HCallNamed>(var->name(), argument_count));
        } else {
          call = PreProcessCall(New<HCallKnownGlobal>(
              expr->target(), argument_count));
        }
      } else {
        // Unknown global function: call through the global call IC.
        HGlobalObject* receiver = Add<HGlobalObject>();
        Push(Add<HPushArgument>(receiver));
        CHECK_ALIVE(VisitArgumentList(expr->arguments()));

        call = New<HCallGlobal>(var->name(), argument_count);
        Drop(argument_count);
      }

    } else if (expr->IsMonomorphic()) {
      // The function is on the stack in the unoptimized code during
      // evaluation of the arguments.
      CHECK_ALIVE(VisitForValue(expr->expression()));
      HValue* function = Top();
      HGlobalObject* global = Add<HGlobalObject>();
      HGlobalReceiver* receiver = Add<HGlobalReceiver>(global);
      Push(receiver);
      CHECK_ALIVE(VisitExpressions(expr->arguments()));
      Add<HCheckValue>(function, expr->target());

      if (TryInlineBuiltinFunctionCall(expr, true)) {  // Drop the function.
        if (FLAG_trace_inlining) {
          PrintF("Inlining builtin ");
          expr->target()->ShortPrint();
          PrintF("\n");
        }
        return;
      }

      if (TryInlineCall(expr, true)) {   // Drop function from environment.
        return;
      } else {
        call = PreProcessCall(New<HInvokeFunction>(function, expr->target(),
                                                   argument_count));
        Drop(1);  // The function.
      }

    } else {
      // Fully generic call through a function value.
      CHECK_ALIVE(VisitForValue(expr->expression()));
      HValue* function = Top();
      HGlobalObject* global_object = Add<HGlobalObject>();
      HGlobalReceiver* receiver = Add<HGlobalReceiver>(global_object);
      Push(Add<HPushArgument>(receiver));
      CHECK_ALIVE(VisitArgumentList(expr->arguments()));

      call = New<HCallFunction>(function, argument_count);
      Drop(argument_count + 1);
    }
  }

  return ast_context()->ReturnInstruction(call, expr->id());
}
7811
7812
// Emits an inlined allocation for a `new Array(...)` call.  Expects the
// constructor followed by the call arguments on the expression stack;
// drops them and hands the newly allocated array to the AST context.
void HOptimizedGraphBuilder::BuildInlinedCallNewArray(CallNew* expr) {
  NoObservableSideEffectsScope no_effects(this);

  int argument_count = expr->arguments()->length();
  // We should at least have the constructor on the expression stack.
  HValue* constructor = environment()->ExpressionStackAt(argument_count);

  ElementsKind kind = expr->elements_kind();
  Handle<Cell> cell = expr->allocation_info_cell();
  AllocationSite* site = AllocationSite::cast(cell->value());

  // Register on the site for deoptimization if the cell value changes.
  site->AddDependentCompilationInfo(AllocationSite::TRANSITIONS, top_info());
  HInstruction* cell_instruction = Add<HConstant>(cell);

  // In the single constant argument case, we may have to adjust elements kind
  // to avoid creating a packed non-empty array.
  if (argument_count == 1 && !IsHoleyElementsKind(kind)) {
    HValue* argument = environment()->Top();
    if (argument->IsConstant()) {
      // `new Array(n)` with non-zero n creates n holes, so the array
      // must use a holey elements kind.
      HConstant* constant_argument = HConstant::cast(argument);
      ASSERT(constant_argument->HasSmiValue());
      int constant_array_size = constant_argument->Integer32Value();
      if (constant_array_size != 0) {
        kind = GetHoleyElementsKind(kind);
      }
    }
  }

  // Build the array.
  JSArrayBuilder array_builder(this,
                               kind,
                               cell_instruction,
                               constructor,
                               DISABLE_ALLOCATION_SITES);
  HValue* new_object;
  if (argument_count == 0) {
    // `new Array()`: empty array.
    new_object = array_builder.AllocateEmptyArray();
  } else if (argument_count == 1) {
    // `new Array(length)`: allocate with the given length.
    HValue* argument = environment()->Top();
    new_object = BuildAllocateArrayFromLength(&array_builder, argument);
  } else {
    // `new Array(a, b, ...)`: allocate and store each argument.
    HValue* length = Add<HConstant>(argument_count);
    // Smi arrays need to initialize array elements with the hole because
    // bailout could occur if the arguments don't fit in a smi.
    //
    // TODO(mvstanton): If all the arguments are constants in smi range, then
    // we could set fill_with_hole to false and save a few instructions.
    JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
        ? JSArrayBuilder::FILL_WITH_HOLE
        : JSArrayBuilder::DONT_FILL_WITH_HOLE;
    new_object = array_builder.AllocateArray(length, length, fill_mode);
    HValue* elements = array_builder.GetElementsLocation();
    for (int i = 0; i < argument_count; i++) {
      // Arguments are on the stack with the last argument on top.
      HValue* value = environment()->ExpressionStackAt(argument_count - i - 1);
      HValue* constant_i = Add<HConstant>(i);
      Add<HStoreKeyed>(elements, constant_i, value, kind);
    }
  }

  Drop(argument_count + 1);  // drop constructor and args.
  ast_context()->ReturnValue(new_object);
}
7876
7877
7878// Checks whether allocation using the given constructor can be inlined.
7879static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
7880  return constructor->has_initial_map() &&
7881      constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
7882      constructor->initial_map()->instance_size() < HAllocate::kMaxInlineSize &&
7883      constructor->initial_map()->InitialPropertiesLength() == 0;
7884}
7885
7886
7887bool HOptimizedGraphBuilder::IsCallNewArrayInlineable(CallNew* expr) {
7888  bool inline_ok = false;
7889  Handle<JSFunction> caller = current_info()->closure();
7890  Handle<JSFunction> target(isolate()->global_context()->array_function(),
7891                            isolate());
7892  int argument_count = expr->arguments()->length();
7893  // We should have the function plus array arguments on the environment stack.
7894  ASSERT(environment()->length() >= (argument_count + 1));
7895  Handle<Cell> cell = expr->allocation_info_cell();
7896  AllocationSite* site = AllocationSite::cast(cell->value());
7897  if (site->CanInlineCall()) {
7898    // We also want to avoid inlining in certain 1 argument scenarios.
7899    if (argument_count == 1) {
7900      HValue* argument = Top();
7901      if (argument->IsConstant()) {
7902        // Do not inline if the constant length argument is not a smi or
7903        // outside the valid range for a fast array.
7904        HConstant* constant_argument = HConstant::cast(argument);
7905        if (constant_argument->HasSmiValue()) {
7906          int value = constant_argument->Integer32Value();
7907          inline_ok = value >= 0 &&
7908              value < JSObject::kInitialMaxFastElementArray;
7909          if (!inline_ok) {
7910            TraceInline(target, caller,
7911                        "Length outside of valid array range");
7912          }
7913        }
7914      } else {
7915        inline_ok = true;
7916      }
7917    } else {
7918      inline_ok = true;
7919    }
7920  } else {
7921    TraceInline(target, caller, "AllocationSite requested no inlining.");
7922  }
7923
7924  if (inline_ok) {
7925    TraceInline(target, caller, NULL);
7926  }
7927  return inline_ok;
7928}
7929
7930
// Compiles a `new` expression.  For monomorphic constructor calls with
// an inlineable allocation, the receiver object is allocated and
// initialized inline and construction is attempted via TryInlineConstruct
// (with a rollback of the emitted instructions if inlining fails).
// Array constructor calls get their own inlined or stub-based paths;
// everything else falls back to a generic HCallNew.
void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (!FLAG_emit_opt_code_positions) SetSourcePosition(expr->position());
  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
  Factory* factory = isolate()->factory();

  // The constructor function is on the stack in the unoptimized code
  // during evaluation of the arguments.
  CHECK_ALIVE(VisitForValue(expr->expression()));
  HValue* function = Top();
  CHECK_ALIVE(VisitExpressions(expr->arguments()));

  if (FLAG_inline_construct &&
      expr->IsMonomorphic() &&
      IsAllocationInlineable(expr->target())) {
    Handle<JSFunction> constructor = expr->target();
    // Deoptimize if the function is no longer the expected constructor.
    HValue* check = Add<HCheckValue>(function, constructor);

    // Force completion of inobject slack tracking before generating
    // allocation code to finalize instance size.
    if (constructor->shared()->IsInobjectSlackTrackingInProgress()) {
      constructor->shared()->CompleteInobjectSlackTracking();
    }

    // Calculate instance size from initial map of constructor.
    ASSERT(constructor->has_initial_map());
    Handle<Map> initial_map(constructor->initial_map());
    int instance_size = initial_map->instance_size();
    ASSERT(initial_map->InitialPropertiesLength() == 0);

    // Allocate an instance of the implicit receiver object.
    HValue* size_in_bytes = Add<HConstant>(instance_size);
    PretenureFlag pretenure_flag =
        (FLAG_pretenuring_call_new &&
            isolate()->heap()->GetPretenureMode() == TENURED)
                ? TENURED : NOT_TENURED;
    HAllocate* receiver =
        Add<HAllocate>(size_in_bytes, HType::JSObject(), pretenure_flag,
        JS_OBJECT_TYPE);
    receiver->set_known_initial_map(initial_map);

    // Load the initial map from the constructor.
    HValue* constructor_value = Add<HConstant>(constructor);
    HValue* initial_map_value =
      Add<HLoadNamedField>(constructor_value, HObjectAccess::ForJSObjectOffset(
            JSFunction::kPrototypeOrInitialMapOffset));

    // Initialize map and fields of the newly allocated object.
    { NoObservableSideEffectsScope no_effects(this);
      ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForJSObjectOffset(JSObject::kMapOffset),
          initial_map_value);
      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForJSObjectOffset(JSObject::kPropertiesOffset),
          empty_fixed_array);
      Add<HStoreNamedField>(receiver,
          HObjectAccess::ForJSObjectOffset(JSObject::kElementsOffset),
          empty_fixed_array);
      // Pre-fill all in-object properties with undefined.
      if (initial_map->inobject_properties() != 0) {
        HConstant* undefined = graph()->GetConstantUndefined();
        for (int i = 0; i < initial_map->inobject_properties(); i++) {
          int property_offset = JSObject::kHeaderSize + i * kPointerSize;
          Add<HStoreNamedField>(receiver,
              HObjectAccess::ForJSObjectOffset(property_offset),
              undefined);
        }
      }
    }

    // Replace the constructor function with a newly allocated receiver using
    // the index of the receiver from the top of the expression stack.
    const int receiver_index = argument_count - 1;
    ASSERT(environment()->ExpressionStackAt(receiver_index) == function);
    environment()->SetExpressionStackAt(receiver_index, receiver);

    if (TryInlineConstruct(expr, receiver)) return;

    // TODO(mstarzinger): For now we remove the previous HAllocate and all
    // corresponding instructions and instead add HPushArgument for the
    // arguments in case inlining failed.  What we actually should do is for
    // inlining to try to build a subgraph without mutating the parent graph.
    HInstruction* instr = current_block()->last();
    while (instr != initial_map_value) {
      HInstruction* prev_instr = instr->previous();
      instr->DeleteAndReplaceWith(NULL);
      instr = prev_instr;
    }
    initial_map_value->DeleteAndReplaceWith(NULL);
    receiver->DeleteAndReplaceWith(NULL);
    check->DeleteAndReplaceWith(NULL);
    environment()->SetExpressionStackAt(receiver_index, function);
    HInstruction* call =
      PreProcessCall(New<HCallNew>(function, argument_count));
    return ast_context()->ReturnInstruction(call, expr->id());
  } else {
    // The constructor function is both an operand to the instruction and an
    // argument to the construct call.
    Handle<JSFunction> array_function(
        isolate()->global_context()->array_function(), isolate());
    bool use_call_new_array = expr->target().is_identical_to(array_function);
    Handle<Cell> cell = expr->allocation_info_cell();
    if (use_call_new_array && IsCallNewArrayInlineable(expr)) {
      // Verify we are still calling the array function for our native context.
      Add<HCheckValue>(function, array_function);
      BuildInlinedCallNewArray(expr);
      return;
    }

    HBinaryCall* call;
    if (use_call_new_array) {
      Add<HCheckValue>(function, array_function);
      call = New<HCallNewArray>(function, argument_count, cell,
                                expr->elements_kind());
    } else {
      call = New<HCallNew>(function, argument_count);
    }
    PreProcessCall(call);
    return ast_context()->ReturnInstruction(call, expr->id());
  }
}
8055
8056
// Support for generating inlined runtime functions.

// Lookup table for generators for runtime calls that are generated inline.
// Elements of the table are member pointers to functions of
// HOptimizedGraphBuilder.
// NOTE(review): the table is built by expanding the two function-list
// macros in sequence; presumably indexing into it relies on that exact
// expansion order — confirm before reordering the lists or entries.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)  \
    &HOptimizedGraphBuilder::Generate##Name,

const HOptimizedGraphBuilder::InlineFunctionGenerator
    HOptimizedGraphBuilder::kInlineFunctionGenerators[] = {
        INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
        INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
};
#undef INLINE_FUNCTION_GENERATOR_ADDRESS
8071
8072
8073template <class ViewClass>
8074void HGraphBuilder::BuildArrayBufferViewInitialization(
8075    HValue* obj,
8076    HValue* buffer,
8077    HValue* byte_offset,
8078    HValue* byte_length) {
8079
8080  for (int offset = ViewClass::kSize;
8081       offset < ViewClass::kSizeWithInternalFields;
8082       offset += kPointerSize) {
8083    Add<HStoreNamedField>(obj,
8084        HObjectAccess::ForJSObjectOffset(offset),
8085        Add<HConstant>(static_cast<int32_t>(0)));
8086  }
8087
8088  Add<HStoreNamedField>(
8089      obj,
8090      HObjectAccess::ForJSArrayBufferViewBuffer(), buffer);
8091  Add<HStoreNamedField>(
8092      obj,
8093      HObjectAccess::ForJSArrayBufferViewByteOffset(),
8094      byte_offset);
8095  Add<HStoreNamedField>(
8096      obj,
8097      HObjectAccess::ForJSArrayBufferViewByteLength(),
8098      byte_length);
8099
8100  HObjectAccess weak_first_view_access =
8101      HObjectAccess::ForJSArrayBufferWeakFirstView();
8102  Add<HStoreNamedField>(obj,
8103      HObjectAccess::ForJSArrayBufferViewWeakNext(),
8104      Add<HLoadNamedField>(buffer, weak_first_view_access));
8105  Add<HStoreNamedField>(buffer, weak_first_view_access, obj);
8106}
8107
8108
8109void HOptimizedGraphBuilder::VisitDataViewInitialize(
8110    CallRuntime* expr) {
8111  ZoneList<Expression*>* arguments = expr->arguments();
8112
8113  NoObservableSideEffectsScope scope(this);
8114  ASSERT(arguments->length()== 4);
8115  CHECK_ALIVE(VisitForValue(arguments->at(0)));
8116  HValue* obj = Pop();
8117
8118  CHECK_ALIVE(VisitForValue(arguments->at(1)));
8119  HValue* buffer = Pop();
8120
8121  CHECK_ALIVE(VisitForValue(arguments->at(2)));
8122  HValue* byte_offset = Pop();
8123
8124  CHECK_ALIVE(VisitForValue(arguments->at(3)));
8125  HValue* byte_length = Pop();
8126
8127  BuildArrayBufferViewInitialization<JSDataView>(
8128      obj, buffer, byte_offset, byte_length);
8129}
8130
8131
// Expands the %TypedArrayInitialize runtime call inline.  The fast path
// (byte offset known or proven to be a Smi) builds the typed array and its
// external-array elements object directly in the graph; a non-Smi byte
// offset falls back to calling the actual runtime function.
void HOptimizedGraphBuilder::VisitTypedArrayInitialize(
    CallRuntime* expr) {
  ZoneList<Expression*>* arguments = expr->arguments();

  // None of the stores emitted below is observable from JavaScript.
  NoObservableSideEffectsScope scope(this);
  static const int kObjectArg = 0;
  static const int kArrayIdArg = 1;
  static const int kBufferArg = 2;
  static const int kByteOffsetArg = 3;
  static const int kByteLengthArg = 4;
  static const int kArgsLength = 5;
  ASSERT(arguments->length() == kArgsLength);


  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
  HValue* obj = Pop();

  // The array id argument is expected to be a Smi literal (asserted
  // below); it selects the external array type and element size.
  ASSERT(arguments->at(kArrayIdArg)->node_type() == AstNode::kLiteral);
  Handle<Object> value =
      static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
  ASSERT(value->IsSmi());
  int array_id = Smi::cast(*value)->value();

  CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
  HValue* buffer = Pop();

  HValue* byte_offset;
  bool is_zero_byte_offset;

  // Recognize a literal zero byte offset statically: this lets us skip
  // both the runtime Smi check and the backing-store offset addition.
  if (arguments->at(kByteOffsetArg)->node_type() == AstNode::kLiteral
      && Smi::FromInt(0) ==
      *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
    byte_offset = Add<HConstant>(static_cast<int32_t>(0));
    is_zero_byte_offset = true;
  } else {
    CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
    byte_offset = Pop();
    is_zero_byte_offset = false;
  }

  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
  HValue* byte_length = Pop();

  IfBuilder byte_offset_smi(this);

  if (!is_zero_byte_offset) {
    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
    byte_offset_smi.Then();
  }

  { //  byte_offset is Smi.
    BuildArrayBufferViewInitialization<JSTypedArray>(
        obj, buffer, byte_offset, byte_length);

    ExternalArrayType array_type = kExternalByteArray;  // Bogus initialization.
    size_t element_size = 1;  // Bogus initialization.
    Runtime::ArrayIdToTypeAndSize(array_id, &array_type, &element_size);

    // Element count = byte_length / element_size.
    HInstruction* length = AddUncasted<HDiv>(byte_length,
        Add<HConstant>(static_cast<int32_t>(element_size)));

    Add<HStoreNamedField>(obj,
        HObjectAccess::ForJSTypedArrayLength(),
        length);

    // Allocate the elements object and give it the map matching the
    // requested external array type.
    HValue* elements =
        Add<HAllocate>(
            Add<HConstant>(ExternalArray::kAlignedSize),
            HType::JSArray(),
            NOT_TENURED,
            static_cast<InstanceType>(FIRST_EXTERNAL_ARRAY_TYPE + array_type));

    Handle<Map> external_array_map(
        isolate()->heap()->MapForExternalArrayType(array_type));
    Add<HStoreNamedField>(elements,
        HObjectAccess::ForMap(),
        Add<HConstant>(external_array_map));

    HValue* backing_store = Add<HLoadNamedField>(
        buffer, HObjectAccess::ForJSArrayBufferBackingStore());

    // The external pointer is the buffer's backing store plus the byte
    // offset; the addition is elided when the offset is statically zero.
    HValue* typed_array_start;
    if (is_zero_byte_offset) {
      typed_array_start = backing_store;
    } else {
      HInstruction* external_pointer =
          AddUncasted<HAdd>(backing_store, byte_offset);
      // Arguments are checked prior to call to TypedArrayInitialize,
      // including byte_offset.
      external_pointer->ClearFlag(HValue::kCanOverflow);
      typed_array_start = external_pointer;
    }

    Add<HStoreNamedField>(elements,
        HObjectAccess::ForExternalArrayExternalPointer(),
        typed_array_start);
    Add<HStoreNamedField>(elements,
        HObjectAccess::ForFixedArrayLength(),
        length);
    Add<HStoreNamedField>(
        obj, HObjectAccess::ForElementsPointer(), elements);
  }

  if (!is_zero_byte_offset) {
    byte_offset_smi.Else();
    { //  byte_offset is not Smi.
      // Slow path: re-push all five arguments and call the real runtime
      // function.
      Push(Add<HPushArgument>(obj));
      VisitArgument(arguments->at(kArrayIdArg));
      Push(Add<HPushArgument>(buffer));
      Push(Add<HPushArgument>(byte_offset));
      Push(Add<HPushArgument>(byte_length));
      Add<HCallRuntime>(expr->name(), expr->function(), kArgsLength);
      Drop(kArgsLength);
    }
  }
  byte_offset_smi.End();
}
8249
8250
// Compiles a runtime call.  Calls into the JS runtime cause a bailout; a
// few runtime functions get dedicated inline expansions; %_Foo intrinsics
// are dispatched through the inline-generator table; everything else is
// emitted as a plain HCallRuntime.
void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (expr->is_jsruntime()) {
    return Bailout(kCallToAJavaScriptRuntimeFunction);
  }

  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);

  // Dedicated inline expansions for selected runtime functions.
  if (function->function_id == Runtime::kDataViewInitialize) {
      return VisitDataViewInitialize(expr);
  }

  if (function->function_id == Runtime::kTypedArrayInitialize) {
    return VisitTypedArrayInitialize(expr);
  }

  // %MaxSmi() folds to a constant directly.
  if (function->function_id == Runtime::kMaxSmi) {
    ASSERT(expr->arguments()->length() == 0);
    HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
    return ast_context()->ReturnInstruction(max_smi, expr->id());
  }

  if (function->intrinsic_type == Runtime::INLINE) {
    ASSERT(expr->name()->length() > 0);
    ASSERT(expr->name()->Get(0) == '_');
    // Call to an inline function.
    int lookup_index = static_cast<int>(function->function_id) -
        static_cast<int>(Runtime::kFirstInlineFunction);
    ASSERT(lookup_index >= 0);
    ASSERT(static_cast<size_t>(lookup_index) <
           ARRAY_SIZE(kInlineFunctionGenerators));
    InlineFunctionGenerator generator = kInlineFunctionGenerators[lookup_index];

    // Call the inline code generator using the pointer-to-member.
    (this->*generator)(expr);
  } else {
    ASSERT(function->intrinsic_type == Runtime::RUNTIME);
    CHECK_ALIVE(VisitArgumentList(expr->arguments()));

    Handle<String> name = expr->name();
    int argument_count = expr->arguments()->length();
    HCallRuntime* call = New<HCallRuntime>(name, function,
                                           argument_count);
    Drop(argument_count);
    return ast_context()->ReturnInstruction(call, expr->id());
  }
}
8301
8302
8303void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
8304  ASSERT(!HasStackOverflow());
8305  ASSERT(current_block() != NULL);
8306  ASSERT(current_block()->HasPredecessor());
8307  switch (expr->op()) {
8308    case Token::DELETE: return VisitDelete(expr);
8309    case Token::VOID: return VisitVoid(expr);
8310    case Token::TYPEOF: return VisitTypeof(expr);
8311    case Token::NOT: return VisitNot(expr);
8312    default: UNREACHABLE();
8313  }
8314}
8315
8316
// Compiles 'delete expr'.  Three cases:
//  - a property: call the DELETE builtin with object, key and strictness;
//  - a variable: non-global stack/context variables yield false ('this'
//    yields true); globals and lookup slots bail out;
//  - anything else: evaluate the operand for effect and return true.
void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
  Property* prop = expr->expression()->AsProperty();
  VariableProxy* proxy = expr->expression()->AsVariableProxy();
  if (prop != NULL) {
    CHECK_ALIVE(VisitForValue(prop->obj()));
    CHECK_ALIVE(VisitForValue(prop->key()));
    HValue* key = Pop();
    HValue* obj = Pop();
    HValue* function = AddLoadJSBuiltin(Builtins::DELETE);
    Add<HPushArgument>(obj);
    Add<HPushArgument>(key);
    Add<HPushArgument>(Add<HConstant>(function_strict_mode_flag()));
    // TODO(olivf) InvokeFunction produces a check for the parameter count,
    // even though we are certain to pass the correct number of arguments here.
    HInstruction* instr = New<HInvokeFunction>(function, 3);
    return ast_context()->ReturnInstruction(instr, expr->id());
  } else if (proxy != NULL) {
    Variable* var = proxy->var();
    if (var->IsUnallocated()) {
      Bailout(kDeleteWithGlobalVariable);
    } else if (var->IsStackAllocated() || var->IsContextSlot()) {
      // Result of deleting non-global variables is false.  'this' is not
      // really a variable, though we implement it as one.  The
      // subexpression does not have side effects.
      HValue* value = var->is_this()
          ? graph()->GetConstantTrue()
          : graph()->GetConstantFalse();
      return ast_context()->ReturnValue(value);
    } else {
      Bailout(kDeleteWithNonGlobalVariable);
    }
  } else {
    // Result of deleting non-property, non-variable reference is true.
    // Evaluate the subexpression for side effects.
    CHECK_ALIVE(VisitForEffect(expr->expression()));
    return ast_context()->ReturnValue(graph()->GetConstantTrue());
  }
}
8355
8356
// 'void expr' evaluates the operand for effect only and yields undefined.
void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
  CHECK_ALIVE(VisitForEffect(expr->expression()));
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
8361
8362
// 'typeof expr' evaluates the operand in a dedicated typeof context
// (VisitForTypeOf) and emits an HTypeof on the resulting value.
void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
  CHECK_ALIVE(VisitForTypeOf(expr->expression()));
  HValue* value = Pop();
  HInstruction* instr = New<HTypeof>(value);
  return ast_context()->ReturnInstruction(instr, expr->id());
}
8369
8370
// Compiles '!expr'.  In a test context the negation is free: just swap
// the true/false branch targets.  In an effect context only the operand
// is evaluated.  In a value context, branch on the operand and
// materialize false/true in separate blocks, then join them.
void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Negation swaps the branch targets.
    VisitForControl(expr->expression(),
                    context->if_false(),
                    context->if_true());
    return;
  }

  if (ast_context()->IsEffect()) {
    VisitForEffect(expr->expression());
    return;
  }

  ASSERT(ast_context()->IsValue());
  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
  // Note the swapped targets: operand-true flows to materialize_false.
  CHECK_BAILOUT(VisitForControl(expr->expression(),
                                materialize_false,
                                materialize_true));

  if (materialize_false->HasPredecessor()) {
    materialize_false->SetJoinId(expr->MaterializeFalseId());
    set_current_block(materialize_false);
    Push(graph()->GetConstantFalse());
  } else {
    materialize_false = NULL;
  }

  if (materialize_true->HasPredecessor()) {
    materialize_true->SetJoinId(expr->MaterializeTrueId());
    set_current_block(materialize_true);
    Push(graph()->GetConstantTrue());
  } else {
    materialize_true = NULL;
  }

  HBasicBlock* join =
    CreateJoin(materialize_false, materialize_true, expr->id());
  set_current_block(join);
  if (join != NULL) return ast_context()->ReturnValue(Pop());
}
8413
8414
// Builds the add/subtract-one for a count operation (++/--).  Expects the
// operand on top of the expression stack.  If |returns_original_input| is
// set (postfix in a non-effect context), the ToNumber'd input is pushed
// so callers can return it as the expression's value.
HInstruction* HOptimizedGraphBuilder::BuildIncrement(
    bool returns_original_input,
    CountOperation* expr) {
  // The input to the count operation is on top of the expression stack.
  Handle<Type> info = expr->type();
  Representation rep = Representation::FromType(info);
  // With no feedback (or tagged feedback), optimistically assume Smi.
  if (rep.IsNone() || rep.IsTagged()) {
    rep = Representation::Smi();
  }

  if (returns_original_input) {
    // We need an explicit HValue representing ToNumber(input).  The
    // actual HChange instruction we need is (sometimes) added in a later
    // phase, so it is not available now to be used as an input to HAdd and
    // as the return value.
    HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
    if (!rep.IsDouble()) {
      number_input->SetFlag(HInstruction::kFlexibleRepresentation);
      number_input->SetFlag(HInstruction::kCannotBeTagged);
    }
    Push(number_input);
  }

  // The addition has no side effects, so we do not need
  // to simulate the expression stack after this instruction.
  // Any later failures deopt to the load of the input or earlier.
  HConstant* delta = (expr->op() == Token::INC)
      ? graph()->GetConstant1()
      : graph()->GetConstantMinus1();
  HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
  if (instr->IsAdd()) {
    HAdd* add = HAdd::cast(instr);
    add->set_observed_input_representation(1, rep);
    add->set_observed_input_representation(2, Representation::Smi());
  }
  instr->SetFlag(HInstruction::kCannotBeTagged);
  instr->ClearAllSideEffects();
  return instr;
}
8454
8455
// Performs the store half of a count operation in an effect context,
// i.e. without making the stored value the expression's result.  The
// operands are pushed here because BuildStore takes them from the
// expression stack (key is optional for named stores).
void HOptimizedGraphBuilder::BuildStoreForEffect(Expression* expr,
                                                 Property* prop,
                                                 BailoutId ast_id,
                                                 BailoutId return_id,
                                                 HValue* object,
                                                 HValue* key,
                                                 HValue* value) {
  EffectContext for_effect(this);
  Push(object);
  if (key != NULL) Push(key);
  Push(value);
  BuildStore(expr, prop, ast_id, return_id);
}
8469
8470
// Compiles ++/-- (prefix and postfix) applied to either a variable or a
// property.  Postfix in a non-effect context must return ToNumber(input)
// rather than the incremented value.
void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  if (!FLAG_emit_opt_code_positions) SetSourcePosition(expr->position());
  Expression* target = expr->expression();
  VariableProxy* proxy = target->AsVariableProxy();
  Property* prop = target->AsProperty();
  if (proxy == NULL && prop == NULL) {
    return Bailout(kInvalidLhsInCountOperation);
  }

  // Match the full code generator stack by simulating an extra stack
  // element for postfix operations in a non-effect context.  The return
  // value is ToNumber(input).
  bool returns_original_input =
      expr->is_postfix() && !ast_context()->IsEffect();
  HValue* input = NULL;  // ToNumber(original_input).
  HValue* after = NULL;  // The result after incrementing or decrementing.

  if (proxy != NULL) {
    // Case 1: the target is a variable.
    Variable* var = proxy->var();
    if (var->mode() == CONST)  {
      return Bailout(kUnsupportedCountOperationWithConst);
    }
    // Argument of the count operation is a variable, not a property.
    ASSERT(prop == NULL);
    CHECK_ALIVE(VisitForValue(target));

    after = BuildIncrement(returns_original_input, expr);
    input = returns_original_input ? Top() : Pop();
    Push(after);

    // Write the incremented value back to wherever the variable lives.
    switch (var->location()) {
      case Variable::UNALLOCATED:
        HandleGlobalVariableAssignment(var,
                                       after,
                                       expr->AssignmentId());
        break;

      case Variable::PARAMETER:
      case Variable::LOCAL:
        BindIfLive(var, after);
        break;

      case Variable::CONTEXT: {
        // Bail out if we try to mutate a parameter value in a function
        // using the arguments object.  We do not (yet) correctly handle the
        // arguments property of the function.
        if (current_info()->scope()->arguments() != NULL) {
          // Parameters will rewrite to context slots.  We have no direct
          // way to detect that the variable is a parameter so we use a
          // linear search of the parameter list.
          int count = current_info()->scope()->num_parameters();
          for (int i = 0; i < count; ++i) {
            if (var == current_info()->scope()->parameter(i)) {
              return Bailout(kAssignmentToParameterInArgumentsObject);
            }
          }
        }

        HValue* context = BuildContextChainWalk(var);
        HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
            ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
        HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
                                                          mode, after);
        if (instr->HasObservableSideEffects()) {
          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
        }
        break;
      }

      case Variable::LOOKUP:
        return Bailout(kLookupVariableInCountOperation);
    }

    Drop(returns_original_input ? 2 : 1);
    return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
  }

  // Case 2: argument of the count operation is a property.
  ASSERT(prop != NULL);
  if (returns_original_input) Push(graph()->GetConstantUndefined());

  CHECK_ALIVE(VisitForValue(prop->obj()));
  HValue* object = Top();

  HValue* key = NULL;
  if ((!prop->IsFunctionPrototype() && !prop->key()->IsPropertyName()) ||
      prop->IsStringAccess()) {
    CHECK_ALIVE(VisitForValue(prop->key()));
    key = Top();
  }

  CHECK_ALIVE(PushLoad(prop, object, key));

  after = BuildIncrement(returns_original_input, expr);

  if (returns_original_input) {
    input = Pop();
    // Drop object and key to push it again in the effect context below.
    Drop(key == NULL ? 1 : 2);
    environment()->SetExpressionStackAt(0, input);
    CHECK_ALIVE(BuildStoreForEffect(
        expr, prop, expr->id(), expr->AssignmentId(), object, key, after));
    return ast_context()->ReturnValue(Pop());
  }

  environment()->SetExpressionStackAt(0, after);
  return BuildStore(expr, prop, expr->id(), expr->AssignmentId());
}
8582
8583
// Builds a charCodeAt of |string| at |index|.  If both operands are
// constants, folds to a constant: NaN for an out-of-range index,
// otherwise the character code.  Otherwise emits a heap-object check, a
// string instance-type check, a bounds check against the string length,
// and an HStringCharCodeAt.
HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
    HValue* string,
    HValue* index) {
  if (string->IsConstant() && index->IsConstant()) {
    HConstant* c_string = HConstant::cast(string);
    HConstant* c_index = HConstant::cast(index);
    if (c_string->HasStringValue() && c_index->HasNumberValue()) {
      int32_t i = c_index->NumberValueAsInteger32();
      Handle<String> s = c_string->StringValue();
      if (i < 0 || i >= s->length()) {
        // Out-of-range access folds to NaN.
        return New<HConstant>(OS::nan_value());
      }
      return New<HConstant>(s->Get(i));
    }
  }
  BuildCheckHeapObject(string);
  HValue* checkstring =
      Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
  HInstruction* length = BuildLoadStringLength(string, checkstring);
  AddInstruction(length);
  HInstruction* checked_index = Add<HBoundsCheck>(index, length);
  return New<HStringCharCodeAt>(string, checked_index);
}
8607
8608
8609// Checks if the given shift amounts have following forms:
8610// (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
8611static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
8612                                             HValue* const32_minus_sa) {
8613  if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
8614    const HConstant* c1 = HConstant::cast(sa);
8615    const HConstant* c2 = HConstant::cast(const32_minus_sa);
8616    return c1->HasInteger32Value() && c2->HasInteger32Value() &&
8617        (c1->Integer32Value() + c2->Integer32Value() == 32);
8618  }
8619  if (!const32_minus_sa->IsSub()) return false;
8620  HSub* sub = HSub::cast(const32_minus_sa);
8621  if (sa != sub->right()) return false;
8622  HValue* const32 = sub->left();
8623  if (!const32->IsConstant() ||
8624      HConstant::cast(const32)->Integer32Value() != 32) {
8625    return false;
8626  }
8627  return (sub->right() == sa);
8628}
8629
8630
8631// Checks if the left and the right are shift instructions with the oposite
8632// directions that can be replaced by one rotate right instruction or not.
8633// Returns the operand and the shift amount for the rotate instruction in the
8634// former case.
8635bool HGraphBuilder::MatchRotateRight(HValue* left,
8636                                     HValue* right,
8637                                     HValue** operand,
8638                                     HValue** shift_amount) {
8639  HShl* shl;
8640  HShr* shr;
8641  if (left->IsShl() && right->IsShr()) {
8642    shl = HShl::cast(left);
8643    shr = HShr::cast(right);
8644  } else if (left->IsShr() && right->IsShl()) {
8645    shl = HShl::cast(right);
8646    shr = HShr::cast(left);
8647  } else {
8648    return false;
8649  }
8650  if (shl->left() != shr->left()) return false;
8651
8652  if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
8653      !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
8654    return false;
8655  }
8656  *operand= shr->left();
8657  *shift_amount = shr->right();
8658  return true;
8659}
8660
8661
8662bool CanBeZero(HValue* right) {
8663  if (right->IsConstant()) {
8664    HConstant* right_const = HConstant::cast(right);
8665    if (right_const->HasInteger32Value() &&
8666       (right_const->Integer32Value() & 0x1f) != 0) {
8667      return false;
8668    }
8669  }
8670  return true;
8671}
8672
8673
8674HValue* HGraphBuilder::EnforceNumberType(HValue* number,
8675                                         Handle<Type> expected) {
8676  if (expected->Is(Type::Smi())) {
8677    return AddUncasted<HForceRepresentation>(number, Representation::Smi());
8678  }
8679  if (expected->Is(Type::Signed32())) {
8680    return AddUncasted<HForceRepresentation>(number,
8681                                             Representation::Integer32());
8682  }
8683  return number;
8684}
8685
8686
// Attempts to truncate |value| to a number for a binary operation and
// refines |*expected| accordingly.  Constants are folded via
// CopyToTruncatedNumber; otherwise only the expected type is adjusted
// (e.g. folding the Undefined case, which HChange already handles, into
// the number expectation) and the value is returned unchanged.
HValue* HGraphBuilder::TruncateToNumber(HValue* value, Handle<Type>* expected) {
  if (value->IsConstant()) {
    HConstant* constant = HConstant::cast(value);
    Maybe<HConstant*> number = constant->CopyToTruncatedNumber(zone());
    if (number.has_value) {
      *expected = handle(Type::Number(), isolate());
      return AddInstruction(number.value);
    }
  }

  // We put temporary values on the stack, which don't correspond to anything
  // in baseline code. Since nothing is observable we avoid recording those
  // pushes with a NoObservableSideEffectsScope.
  NoObservableSideEffectsScope no_effects(this);

  Handle<Type> expected_type = *expected;

  // Separate the number type from the rest.
  Handle<Type> expected_obj = handle(Type::Intersect(
      expected_type, handle(Type::NonNumber(), isolate())), isolate());
  Handle<Type> expected_number = handle(Type::Intersect(
      expected_type, handle(Type::Number(), isolate())), isolate());

  // We expect to get a number.
  // (We need to check first, since Type::None->Is(Type::Any()) == true.
  if (expected_obj->Is(Type::None())) {
    ASSERT(!expected_number->Is(Type::None()));
    return value;
  }

  if (expected_obj->Is(Type::Undefined())) {
    // This is already done by HChange.
    *expected = handle(Type::Union(
          expected_number, handle(Type::Double(), isolate())), isolate());
    return value;
  }

  return value;
}
8726
8727
// Builds a binary operation using the type feedback recorded on the AST
// node, then adds a simulate where the result demands one (observable
// side effects, or a phi produced by an inlined subgraph).
HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
    BinaryOperation* expr,
    HValue* left,
    HValue* right) {
  Handle<Type> left_type = expr->left()->bounds().lower;
  Handle<Type> right_type = expr->right()->bounds().lower;
  Handle<Type> result_type = expr->bounds().lower;
  Maybe<int> fixed_right_arg = expr->fixed_right_arg();

  HValue* result = HGraphBuilder::BuildBinaryOperation(
      expr->op(), left, right, left_type, right_type,
      result_type, fixed_right_arg);
  // Add a simulate after instructions with observable side effects, and
  // after phis, which are the result of BuildBinaryOperation when we
  // inlined some complex subgraph.
  if (result->HasObservableSideEffects() || result->IsPhi()) {
    Push(result);
    Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
    Drop(1);
  }
  return result;
}
8750
8751
// Shared binary-operation builder used by both the optimizing compiler
// and code stubs.  Emits soft deopts when type feedback is missing,
// handles string addition specially (with number-to-string conversion or
// builtin fallbacks), routes non-primitive operands in stubs through the
// generic JS builtins, and otherwise emits the arithmetic/bitwise/shift
// instruction, annotating it with observed input/output representations.
HValue* HGraphBuilder::BuildBinaryOperation(
    Token::Value op,
    HValue* left,
    HValue* right,
    Handle<Type> left_type,
    Handle<Type> right_type,
    Handle<Type> result_type,
    Maybe<int> fixed_right_arg) {

  Representation left_rep = Representation::FromType(left_type);
  Representation right_rep = Representation::FromType(right_type);

  // ADD may be string concatenation; in that case operands must not be
  // eagerly truncated to numbers below.
  bool maybe_string_add = op == Token::ADD &&
                          (left_type->Maybe(Type::String()) ||
                           right_type->Maybe(Type::String()));

  if (left_type->Is(Type::None())) {
    Add<HDeoptimize>("Insufficient type feedback for LHS of binary operation",
                     Deoptimizer::SOFT);
    // TODO(rossberg): we should be able to get rid of non-continuous
    // defaults.
    left_type = handle(Type::Any(), isolate());
  } else {
    if (!maybe_string_add) left = TruncateToNumber(left, &left_type);
    left_rep = Representation::FromType(left_type);
  }

  if (right_type->Is(Type::None())) {
    Add<HDeoptimize>("Insufficient type feedback for RHS of binary operation",
                     Deoptimizer::SOFT);
    right_type = handle(Type::Any(), isolate());
  } else {
    if (!maybe_string_add) right = TruncateToNumber(right, &right_type);
    right_rep = Representation::FromType(right_type);
  }

  // Special case for string addition here.
  if (op == Token::ADD &&
      (left_type->Is(Type::String()) || right_type->Is(Type::String()))) {
    // Validate type feedback for left argument.
    if (left_type->Is(Type::String())) {
      left = BuildCheckString(left);
    }

    // Validate type feedback for right argument.
    if (right_type->Is(Type::String())) {
      right = BuildCheckString(right);
    }

    // Convert left argument as necessary.
    if (left_type->Is(Type::Number())) {
      ASSERT(right_type->Is(Type::String()));
      left = BuildNumberToString(left, left_type);
    } else if (!left_type->Is(Type::String())) {
      // Left operand of unknown type: defer to the STRING_ADD_RIGHT
      // builtin, which converts its left argument.
      ASSERT(right_type->Is(Type::String()));
      HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_RIGHT);
      Add<HPushArgument>(left);
      Add<HPushArgument>(right);
      return AddUncasted<HInvokeFunction>(function, 2);
    }

    // Convert right argument as necessary.
    if (right_type->Is(Type::Number())) {
      ASSERT(left_type->Is(Type::String()));
      right = BuildNumberToString(right, right_type);
    } else if (!right_type->Is(Type::String())) {
      // Right operand of unknown type: defer to the STRING_ADD_LEFT
      // builtin, which converts its right argument.
      ASSERT(left_type->Is(Type::String()));
      HValue* function = AddLoadJSBuiltin(Builtins::STRING_ADD_LEFT);
      Add<HPushArgument>(left);
      Add<HPushArgument>(right);
      return AddUncasted<HInvokeFunction>(function, 2);
    }

    // Both operands are strings; no further checks are needed.
    return AddUncasted<HStringAdd>(left, right, STRING_ADD_CHECK_NONE);
  }

  if (graph()->info()->IsStub()) {
    left = EnforceNumberType(left, left_type);
    right = EnforceNumberType(right, right_type);
  }

  Representation result_rep = Representation::FromType(result_type);

  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
                          (right_rep.IsTagged() && !right_rep.IsSmi());

  HInstruction* instr = NULL;
  // Only the stub is allowed to call into the runtime, since otherwise we would
  // inline several instructions (including the two pushes) for every tagged
  // operation in optimized code, which is more expensive, than a stub call.
  if (graph()->info()->IsStub() && is_non_primitive) {
    HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op));
    Add<HPushArgument>(left);
    Add<HPushArgument>(right);
    instr = AddUncasted<HInvokeFunction>(function, 2);
  } else {
    switch (op) {
      case Token::ADD:
        instr = AddUncasted<HAdd>(left, right);
        break;
      case Token::SUB:
        instr = AddUncasted<HSub>(left, right);
        break;
      case Token::MUL:
        instr = AddUncasted<HMul>(left, right);
        break;
      case Token::MOD: {
        // Type feedback may have recorded a single right-operand value;
        // verify it (or deopt) so HMod can rely on it.
        if (fixed_right_arg.has_value) {
          if (right->IsConstant()) {
            HConstant* c_right = HConstant::cast(right);
            if (c_right->HasInteger32Value()) {
              ASSERT_EQ(fixed_right_arg.value, c_right->Integer32Value());
            }
          } else {
            HConstant* fixed_right = Add<HConstant>(
                static_cast<int>(fixed_right_arg.value));
            IfBuilder if_same(this);
            if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
            if_same.Then();
            if_same.ElseDeopt("Unexpected RHS of binary operation");
            right = fixed_right;
          }
        }
        instr = AddUncasted<HMod>(left, right);
        break;
      }
      case Token::DIV:
        instr = AddUncasted<HDiv>(left, right);
        break;
      case Token::BIT_XOR:
      case Token::BIT_AND:
        instr = AddUncasted<HBitwise>(op, left, right);
        break;
      case Token::BIT_OR: {
        // Recognize (x << a) | (x >> (32 - a)) and emit a rotate instead.
        HValue* operand, *shift_amount;
        if (left_type->Is(Type::Signed32()) &&
            right_type->Is(Type::Signed32()) &&
            MatchRotateRight(left, right, &operand, &shift_amount)) {
          instr = AddUncasted<HRor>(operand, shift_amount);
        } else {
          instr = AddUncasted<HBitwise>(op, left, right);
        }
        break;
      }
      case Token::SAR:
        instr = AddUncasted<HSar>(left, right);
        break;
      case Token::SHR:
        instr = AddUncasted<HShr>(left, right);
        if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
            CanBeZero(right)) {
          graph()->RecordUint32Instruction(instr);
        }
        break;
      case Token::SHL:
        instr = AddUncasted<HShl>(left, right);
        break;
      default:
        UNREACHABLE();
    }
  }

  if (instr->IsBinaryOperation()) {
    HBinaryOperation* binop = HBinaryOperation::cast(instr);
    binop->set_observed_input_representation(1, left_rep);
    binop->set_observed_input_representation(2, right_rep);
    binop->initialize_output_representation(result_rep);
    if (graph()->info()->IsStub()) {
      // Stub should not call into stub.
      instr->SetFlag(HValue::kCannotBeTagged);
      // And should truncate on HForceRepresentation already.
      if (left->IsForceRepresentation()) {
        left->CopyFlag(HValue::kTruncatingToSmi, instr);
        left->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
      if (right->IsForceRepresentation()) {
        right->CopyFlag(HValue::kTruncatingToSmi, instr);
        right->CopyFlag(HValue::kTruncatingToInt32, instr);
      }
    }
  }
  return instr;
}
8935
8936
8937// Check for the form (%_ClassOf(foo) === 'BarClass').
8938static bool IsClassOfTest(CompareOperation* expr) {
8939  if (expr->op() != Token::EQ_STRICT) return false;
8940  CallRuntime* call = expr->left()->AsCallRuntime();
8941  if (call == NULL) return false;
8942  Literal* literal = expr->right()->AsLiteral();
8943  if (literal == NULL) return false;
8944  if (!literal->value()->IsString()) return false;
8945  if (!call->name()->IsOneByteEqualTo(STATIC_ASCII_VECTOR("_ClassOf"))) {
8946    return false;
8947  }
8948  ASSERT(call->arguments()->length() == 1);
8949  return true;
8950}
8951
8952
8953void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
8954  ASSERT(!HasStackOverflow());
8955  ASSERT(current_block() != NULL);
8956  ASSERT(current_block()->HasPredecessor());
8957  switch (expr->op()) {
8958    case Token::COMMA:
8959      return VisitComma(expr);
8960    case Token::OR:
8961    case Token::AND:
8962      return VisitLogicalExpression(expr);
8963    default:
8964      return VisitArithmeticExpression(expr);
8965  }
8966}
8967
8968
// The comma operator: the left subexpression is evaluated for effect
// only; the whole expression's value is the right subexpression's.
void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForEffect(expr->left()));
  // Visit the right subexpression in the same AST context as the entire
  // expression.
  Visit(expr->right());
}
8975
8976
// Compile a short-circuiting logical expression (&& or ||).  The strategy
// depends on the surrounding AST context:
//  - Test context: thread the control edges of the left subexpression
//    directly into the test's true/false targets.
//  - Value context: materialize the left value, branch on its boolean-ness,
//    and join with the right value where needed.
//  - Effect context: evaluate both sides only for control flow and side
//    effects; no value is materialized.
void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  if (ast_context()->IsTest()) {
    TestContext* context = TestContext::cast(ast_context());
    // Translate left subexpression.  For &&, falsy left goes straight to
    // the test's false target; for ||, truthy left goes to the true target.
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    eval_right,
                                    context->if_false()));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(),
                                    context->if_true(),
                                    eval_right));
    }

    // Translate right subexpression by visiting it in the same AST
    // context as the entire expression.  Skip it entirely if the left
    // subexpression never reaches this block.
    if (eval_right->HasPredecessor()) {
      eval_right->SetJoinId(expr->RightId());
      set_current_block(eval_right);
      Visit(expr->right());
    }

  } else if (ast_context()->IsValue()) {
    CHECK_ALIVE(VisitForValue(expr->left()));
    ASSERT(current_block() != NULL);
    HValue* left_value = Top();

    // Short-circuit left values that always evaluate to the same boolean value.
    if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
      // l (evals true)  && r -> r
      // l (evals true)  || r -> l
      // l (evals false) && r -> l
      // l (evals false) || r -> r
      if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
        Drop(1);
        CHECK_ALIVE(VisitForValue(expr->right()));
      }
      return ast_context()->ReturnValue(Pop());
    }

    // We need an extra block to maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* eval_right = graph()->CreateBasicBlock();
    ToBooleanStub::Types expected(expr->left()->to_boolean_types());
    HBranch* test = is_logical_and
        ? New<HBranch>(left_value, expected, eval_right, empty_block)
        : New<HBranch>(left_value, expected, empty_block, eval_right);
    FinishCurrentBlock(test);

    // The left value stays on the stack along the empty_block path; along
    // the eval_right path it is replaced by the right value.
    set_current_block(eval_right);
    Drop(1);  // Value of the left subexpression.
    CHECK_BAILOUT(VisitForValue(expr->right()));

    HBasicBlock* join_block =
      CreateJoin(empty_block, current_block(), expr->id());
    set_current_block(join_block);
    return ast_context()->ReturnValue(Pop());

  } else {
    ASSERT(ast_context()->IsEffect());
    // In an effect context, we don't need the value of the left subexpression,
    // only its control flow and side effects.  We need an extra block to
    // maintain edge-split form.
    HBasicBlock* empty_block = graph()->CreateBasicBlock();
    HBasicBlock* right_block = graph()->CreateBasicBlock();
    if (is_logical_and) {
      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
    } else {
      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
    }

    // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
    // actually two empty blocks (one here and one inserted by
    // TestContext::BuildBranch, and that they both have an HSimulate though the
    // second one is not a merge node, and that we really have no good AST ID to
    // put on that first HSimulate.

    // Unreachable arms are pruned (NULL) before joining.
    if (empty_block->HasPredecessor()) {
      empty_block->SetJoinId(expr->id());
    } else {
      empty_block = NULL;
    }

    if (right_block->HasPredecessor()) {
      right_block->SetJoinId(expr->RightId());
      set_current_block(right_block);
      CHECK_BAILOUT(VisitForEffect(expr->right()));
      right_block = current_block();
    } else {
      right_block = NULL;
    }

    HBasicBlock* join_block =
      CreateJoin(empty_block, right_block, expr->id());
    set_current_block(join_block);
    // We did not materialize any value in the predecessor environments,
    // so there is no need to handle it here.
  }
}
9078
9079
// Compile a non-comma, non-logical binary operation: evaluate both operands
// to values, build the corresponding instruction, and return its value to
// the surrounding AST context.
void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));
  SetSourcePosition(expr->position());
  HValue* right = Pop();
  HValue* left = Pop();
  HValue* result = BuildBinaryOperation(expr, left, right);
  // Attach per-operand source positions when optimized-code positions are
  // being emitted and the result really is a binary operation (it may have
  // been folded to something simpler).
  if (FLAG_emit_opt_code_positions && result->IsBinaryOperation()) {
    HBinaryOperation::cast(result)->SetOperandPositions(
        zone(), expr->left()->position(), expr->right()->position());
  }
  return ast_context()->ReturnValue(result);
}
9093
9094
// Compile a comparison of the form (typeof sub_expr === 'check'): evaluate
// the operand in typeof context and emit a branch that tests its type
// against the literal string.
void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
                                                        Expression* sub_expr,
                                                        Handle<String> check) {
  CHECK_ALIVE(VisitForTypeOf(sub_expr));
  SetSourcePosition(expr->position());
  HValue* value = Pop();
  HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
  return ast_context()->ReturnControl(instr, expr->id());
}
9104
9105
9106static bool IsLiteralCompareBool(Isolate* isolate,
9107                                 HValue* left,
9108                                 Token::Value op,
9109                                 HValue* right) {
9110  return op == Token::EQ_STRICT &&
9111      ((left->IsConstant() &&
9112          HConstant::cast(left)->handle(isolate)->IsBoolean()) ||
9113       (right->IsConstant() &&
9114           HConstant::cast(right)->handle(isolate)->IsBoolean()));
9115}
9116
9117
// Compile a comparison expression.  Special-cases literal typeof/nil/bool
// comparisons, class-of tests, instanceof, and 'in'; all remaining cases
// are dispatched on the collected type feedback (receiver, internalized
// string, string, numeric, or generic).
void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());

  if (!FLAG_emit_opt_code_positions) SetSourcePosition(expr->position());

  // Check for a few fast cases. The AST visiting behavior must be in sync
  // with the full codegen: We don't push both left and right values onto
  // the expression stack when one side is a special-case literal.
  Expression* sub_expr = NULL;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    return HandleLiteralCompareTypeof(expr, sub_expr, check);
  }
  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
  }
  if (expr->IsLiteralCompareNull(&sub_expr)) {
    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
  }

  // (%_ClassOf(x) === 'Literal') compiles to a dedicated class-of test.
  if (IsClassOfTest(expr)) {
    CallRuntime* call = expr->left()->AsCallRuntime();
    ASSERT(call->arguments()->length() == 1);
    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
    HValue* value = Pop();
    Literal* literal = expr->right()->AsLiteral();
    Handle<String> rhs = Handle<String>::cast(literal->value());
    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
    return ast_context()->ReturnControl(instr, expr->id());
  }

  // Gather the recorded type bounds for both operands and their
  // combination; these drive the specialized lowerings below.
  Handle<Type> left_type = expr->left()->bounds().lower;
  Handle<Type> right_type = expr->right()->bounds().lower;
  Handle<Type> combined_type = expr->combined_type();
  Representation combined_rep = Representation::FromType(combined_type);
  Representation left_rep = Representation::FromType(left_type);
  Representation right_rep = Representation::FromType(right_type);

  CHECK_ALIVE(VisitForValue(expr->left()));
  CHECK_ALIVE(VisitForValue(expr->right()));

  if (FLAG_emit_opt_code_positions) SetSourcePosition(expr->position());

  HValue* right = Pop();
  HValue* left = Pop();
  Token::Value op = expr->op();

  // Strict equality against a constant boolean reduces to a reference check.
  if (IsLiteralCompareBool(isolate(), left, op, right)) {
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  }

  if (op == Token::INSTANCEOF) {
    // Check to see if the rhs of the instanceof is a global function not
    // residing in new space. If it is we assume that the function will stay the
    // same.
    Handle<JSFunction> target = Handle<JSFunction>::null();
    VariableProxy* proxy = expr->right()->AsVariableProxy();
    bool global_function = (proxy != NULL) && proxy->var()->IsUnallocated();
    if (global_function &&
        current_info()->has_global_object() &&
        !current_info()->global_object()->IsAccessCheckNeeded()) {
      Handle<String> name = proxy->name();
      Handle<GlobalObject> global(current_info()->global_object());
      LookupResult lookup(isolate());
      global->Lookup(*name, &lookup);
      if (lookup.IsNormal() && lookup.GetValue()->IsJSFunction()) {
        Handle<JSFunction> candidate(JSFunction::cast(lookup.GetValue()));
        // If the function is in new space we assume it's more likely to
        // change and thus prefer the general IC code.
        if (!isolate()->heap()->InNewSpace(*candidate)) {
          target = candidate;
        }
      }
    }

    // If the target is not null we have found a known global function that is
    // assumed to stay the same for this instanceof.
    if (target.is_null()) {
      HInstanceOf* result = New<HInstanceOf>(left, right);
      return ast_context()->ReturnInstruction(result, expr->id());
    } else {
      // Deopt if the global function is ever replaced, then use the
      // specialized known-global instanceof.
      Add<HCheckValue>(right, target);
      HInstanceOfKnownGlobal* result =
        New<HInstanceOfKnownGlobal>(left, target);
      return ast_context()->ReturnInstruction(result, expr->id());
    }

    // Code below assumes that we don't fall through.
    UNREACHABLE();
  } else if (op == Token::IN) {
    HValue* function = AddLoadJSBuiltin(Builtins::IN);
    Add<HPushArgument>(left);
    Add<HPushArgument>(right);
    // TODO(olivf) InvokeFunction produces a check for the parameter count,
    // even though we are certain to pass the correct number of arguments here.
    HInstruction* result = New<HInvokeFunction>(function, 2);
    return ast_context()->ReturnInstruction(result, expr->id());
  }

  // Cases handled below depend on collected type feedback. They should
  // soft deoptimize when there is no type feedback.
  if (combined_type->Is(Type::None())) {
    Add<HDeoptimize>("Insufficient type feedback for combined type "
                     "of binary operation",
                     Deoptimizer::SOFT);
    combined_type = left_type = right_type = handle(Type::Any(), isolate());
  }

  if (combined_type->Is(Type::Receiver())) {
    switch (op) {
      case Token::EQ:
      case Token::EQ_STRICT: {
        // Can we get away with map check and not instance type check?
        if (combined_type->IsClass()) {
          Handle<Map> map = combined_type->AsClass();
          AddCheckMap(left, map);
          AddCheckMap(right, map);
          HCompareObjectEqAndBranch* result =
              New<HCompareObjectEqAndBranch>(left, right);
          if (FLAG_emit_opt_code_positions) {
            result->set_operand_position(zone(), 0, expr->left()->position());
            result->set_operand_position(zone(), 1, expr->right()->position());
          }
          return ast_context()->ReturnControl(result, expr->id());
        } else {
          // No single map: fall back to instance-type checks on both sides.
          BuildCheckHeapObject(left);
          Add<HCheckInstanceType>(left, HCheckInstanceType::IS_SPEC_OBJECT);
          BuildCheckHeapObject(right);
          Add<HCheckInstanceType>(right, HCheckInstanceType::IS_SPEC_OBJECT);
          HCompareObjectEqAndBranch* result =
              New<HCompareObjectEqAndBranch>(left, right);
          return ast_context()->ReturnControl(result, expr->id());
        }
      }
      default:
        return Bailout(kUnsupportedNonPrimitiveCompare);
    }
  } else if (combined_type->Is(Type::InternalizedString()) &&
             Token::IsEqualityOp(op)) {
    // Internalized strings are unique, so equality is reference equality.
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
    HCompareObjectEqAndBranch* result =
        New<HCompareObjectEqAndBranch>(left, right);
    return ast_context()->ReturnControl(result, expr->id());
  } else if (combined_type->Is(Type::String())) {
    BuildCheckHeapObject(left);
    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
    BuildCheckHeapObject(right);
    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
    HStringCompareAndBranch* result =
        New<HStringCompareAndBranch>(left, right, op);
    return ast_context()->ReturnControl(result, expr->id());
  } else {
    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
      // No usable representation: emit the generic compare.
      HCompareGeneric* result = New<HCompareGeneric>(left, right, op);
      result->set_observed_input_representation(1, left_rep);
      result->set_observed_input_representation(2, right_rep);
      return ast_context()->ReturnInstruction(result, expr->id());
    } else {
      HCompareNumericAndBranch* result =
          New<HCompareNumericAndBranch>(left, right, op);
      result->set_observed_input_representation(left_rep, right_rep);
      if (FLAG_emit_opt_code_positions) {
        result->SetOperandPositions(zone(),
                                    expr->left()->position(),
                                    expr->right()->position());
      }
      return ast_context()->ReturnControl(result, expr->id());
    }
  }
}
9295
9296
9297void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
9298                                                     Expression* sub_expr,
9299                                                     NilValue nil) {
9300  ASSERT(!HasStackOverflow());
9301  ASSERT(current_block() != NULL);
9302  ASSERT(current_block()->HasPredecessor());
9303  ASSERT(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
9304  if (!FLAG_emit_opt_code_positions) SetSourcePosition(expr->position());
9305  CHECK_ALIVE(VisitForValue(sub_expr));
9306  HValue* value = Pop();
9307  if (expr->op() == Token::EQ_STRICT) {
9308    HConstant* nil_constant = nil == kNullValue
9309        ? graph()->GetConstantNull()
9310        : graph()->GetConstantUndefined();
9311    HCompareObjectEqAndBranch* instr =
9312        New<HCompareObjectEqAndBranch>(value, nil_constant);
9313    return ast_context()->ReturnControl(instr, expr->id());
9314  } else {
9315    ASSERT_EQ(Token::EQ, expr->op());
9316    Handle<Type> type = expr->combined_type()->Is(Type::None())
9317        ? handle(Type::Any(), isolate_)
9318        : expr->combined_type();
9319    HIfContinuation continuation;
9320    BuildCompareNil(value, type, &continuation);
9321    return ast_context()->ReturnContinuation(&continuation, expr->id());
9322  }
9323}
9324
9325
9326HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
9327  // If we share optimized code between different closures, the
9328  // this-function is not a constant, except inside an inlined body.
9329  if (function_state()->outer() != NULL) {
9330      return New<HConstant>(
9331          function_state()->compilation_info()->closure());
9332  } else {
9333      return New<HThisFunction>();
9334  }
9335}
9336
9337
// Emit code that materializes a copy of the given boilerplate object
// (a JSObject or JSArray literal), allocating the object and, if needed,
// its elements backing store, then copying elements and in-object
// properties.  Nested object values recurse through the allocation-site
// usage context.
HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
    Handle<JSObject> boilerplate_object,
    AllocationSiteUsageContext* site_context) {
  NoObservableSideEffectsScope no_effects(this);
  InstanceType instance_type = boilerplate_object->map()->instance_type();
  ASSERT(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);

  HType type = instance_type == JS_ARRAY_TYPE
      ? HType::JSArray() : HType::JSObject();
  HValue* object_size_constant = Add<HConstant>(
      boilerplate_object->map()->instance_size());

  // Pick the pretenure mode: default to the heap's global decision, but
  // when allocation-site pretenuring is enabled, use the decision recorded
  // on the current allocation site instead.
  PretenureFlag pretenure_flag = isolate()->heap()->GetPretenureMode();
  if (FLAG_allocation_site_pretenuring) {
    pretenure_flag = site_context->current()->GetPretenureMode()
        ? TENURED
        : NOT_TENURED;
  }

  HInstruction* object = Add<HAllocate>(object_size_constant, type,
      pretenure_flag, instance_type, site_context->current());

  BuildEmitObjectHeader(boilerplate_object, object);

  // Copy-on-write elements need no fresh backing store (size 0 below);
  // the boilerplate's array is shared instead.
  Handle<FixedArrayBase> elements(boilerplate_object->elements());
  int elements_size = (elements->length() > 0 &&
      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
          elements->Size() : 0;

  HInstruction* object_elements = NULL;
  if (elements_size > 0) {
    HValue* object_elements_size = Add<HConstant>(elements_size);
    if (boilerplate_object->HasFastDoubleElements()) {
      object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
          pretenure_flag, FIXED_DOUBLE_ARRAY_TYPE, site_context->current());
    } else {
      object_elements = Add<HAllocate>(object_elements_size, HType::JSObject(),
          pretenure_flag, FIXED_ARRAY_TYPE, site_context->current());
    }
  }
  BuildInitElementsInObjectHeader(boilerplate_object, object, object_elements);

  // Copy object elements if non-COW.
  if (object_elements != NULL) {
    BuildEmitElements(boilerplate_object, elements, object_elements,
                      site_context);
  }

  // Copy in-object properties.
  if (boilerplate_object->map()->NumberOfFields() != 0) {
    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
                                pretenure_flag);
  }
  return object;
}
9397
9398
// Emit stores initializing the header of the newly allocated literal copy:
// its map, its (empty) properties backing store, and, for arrays, the
// length field.
void HOptimizedGraphBuilder::BuildEmitObjectHeader(
    Handle<JSObject> boilerplate_object,
    HInstruction* object) {
  // Boilerplates for fast literals never have out-of-object properties.
  ASSERT(boilerplate_object->properties()->length() == 0);

  Handle<Map> boilerplate_object_map(boilerplate_object->map());
  AddStoreMapConstant(object, boilerplate_object_map);

  // The properties field always points at the shared empty fixed array.
  Handle<Object> properties_field =
      Handle<Object>(boilerplate_object->properties(), isolate());
  ASSERT(*properties_field == isolate()->heap()->empty_fixed_array());
  HInstruction* properties = Add<HConstant>(properties_field);
  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
  Add<HStoreNamedField>(object, access, properties);

  if (boilerplate_object->IsJSArray()) {
    // Arrays additionally carry a length; copy it from the boilerplate.
    Handle<JSArray> boilerplate_array =
        Handle<JSArray>::cast(boilerplate_object);
    Handle<Object> length_field =
        Handle<Object>(boilerplate_array->length(), isolate());
    HInstruction* length = Add<HConstant>(length_field);

    ASSERT(boilerplate_array->length()->IsSmi());
    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
        boilerplate_array->GetElementsKind()), length);
  }
}
9426
9427
// Store the elements pointer of the new object.  When no fresh backing
// store was allocated (object_elements == NULL), the boilerplate's own
// elements array is shared via a constant instead.
void HOptimizedGraphBuilder::BuildInitElementsInObjectHeader(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    HInstruction* object_elements) {
  ASSERT(boilerplate_object->properties()->length() == 0);
  if (object_elements == NULL) {
    Handle<Object> elements_field =
        Handle<Object>(boilerplate_object->elements(), isolate());
    object_elements = Add<HConstant>(elements_field);
  }
  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
      object_elements);
}
9441
9442
// Emit stores for all in-object (FIELD) properties of the boilerplate into
// the new object.  Nested JSObject values are deep-copied recursively via
// BuildFastLiteral; double fields get a freshly allocated HeapNumber box;
// any remaining in-object slots are filled with one-pointer filler maps.
void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
    Handle<JSObject> boilerplate_object,
    HInstruction* object,
    AllocationSiteUsageContext* site_context,
    PretenureFlag pretenure_flag) {
  Handle<DescriptorArray> descriptors(
      boilerplate_object->map()->instance_descriptors());
  int limit = boilerplate_object->map()->NumberOfOwnDescriptors();

  int copied_fields = 0;
  for (int i = 0; i < limit; i++) {
    PropertyDetails details = descriptors->GetDetails(i);
    // Only FIELD properties live in the object; skip everything else.
    if (details.type() != FIELD) continue;
    copied_fields++;
    int index = descriptors->GetFieldIndex(i);
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(index);
    Handle<Name> name(descriptors->GetKey(i));
    Handle<Object> value =
        Handle<Object>(boilerplate_object->InObjectPropertyAt(index),
        isolate());

    // The access for the store depends on the type of the boilerplate.
    HObjectAccess access = boilerplate_object->IsJSArray() ?
        HObjectAccess::ForJSArrayOffset(property_offset) :
        HObjectAccess::ForJSObjectOffset(property_offset);

    if (value->IsJSObject()) {
      // Deep-copy nested object literals, tracking their allocation sites.
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreNamedField>(object, access, result);
    } else {
      Representation representation = details.representation();
      HInstruction* value_instruction;

      if (representation.IsDouble()) {
        // Allocate a HeapNumber box and store the value into it.
        HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
        // This heap number alloc does not have a corresponding
        // AllocationSite. That is okay because
        // 1) it's a child object of another object with a valid allocation site
        // 2) we can just use the mode of the parent object for pretenuring
        HInstruction* double_box =
            Add<HAllocate>(heap_number_constant, HType::HeapNumber(),
                pretenure_flag, HEAP_NUMBER_TYPE);
        AddStoreMapConstant(double_box,
            isolate()->factory()->heap_number_map());
        Add<HStoreNamedField>(double_box, HObjectAccess::ForHeapNumberValue(),
                              Add<HConstant>(value));
        value_instruction = double_box;
      } else if (representation.IsSmi()) {
        value_instruction = value->IsUninitialized()
            ? graph()->GetConstant0()
            : Add<HConstant>(value);
        // Ensure that value is stored as smi.
        access = access.WithRepresentation(representation);
      } else {
        value_instruction = Add<HConstant>(value);
      }

      Add<HStoreNamedField>(object, access, value_instruction);
    }
  }

  // Fill any in-object slots not covered by a FIELD descriptor with
  // one-pointer fillers so the object stays fully initialized.
  int inobject_properties = boilerplate_object->map()->inobject_properties();
  HInstruction* value_instruction =
      Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
  for (int i = copied_fields; i < inobject_properties; i++) {
    ASSERT(boilerplate_object->IsJSObject());
    int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
    HObjectAccess access = HObjectAccess::ForJSObjectOffset(property_offset);
    Add<HStoreNamedField>(object, access, value_instruction);
  }
}
9519
9520
9521void HOptimizedGraphBuilder::BuildEmitElements(
9522    Handle<JSObject> boilerplate_object,
9523    Handle<FixedArrayBase> elements,
9524    HValue* object_elements,
9525    AllocationSiteUsageContext* site_context) {
9526  ElementsKind kind = boilerplate_object->map()->elements_kind();
9527  int elements_length = elements->length();
9528  HValue* object_elements_length = Add<HConstant>(elements_length);
9529  BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
9530
9531  // Copy elements backing store content.
9532  if (elements->IsFixedDoubleArray()) {
9533    BuildEmitFixedDoubleArray(elements, kind, object_elements);
9534  } else if (elements->IsFixedArray()) {
9535    BuildEmitFixedArray(elements, kind, object_elements,
9536                        site_context);
9537  } else {
9538    UNREACHABLE();
9539  }
9540}
9541
9542
// Copy every element of a double backing store from the boilerplate into
// the new elements array.  Loads allow the hole and stores allow
// undefined-as-NaN so holey double arrays round-trip correctly.
void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
    Handle<FixedArrayBase> elements,
    ElementsKind kind,
    HValue* object_elements) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  for (int i = 0; i < elements_length; i++) {
    HValue* key_constant = Add<HConstant>(i);
    HInstruction* value_instruction =
        Add<HLoadKeyed>(boilerplate_elements, key_constant,
                        static_cast<HValue*>(NULL), kind,
                        ALLOW_RETURN_HOLE);
    HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
                                           value_instruction, kind);
    store->SetFlag(HValue::kAllowUndefinedAsNaN);
  }
}
9560
9561
// Copy every element of a FixedArray backing store from the boilerplate
// into the new elements array.  JSObject elements are deep-copied
// recursively via BuildFastLiteral (tracking allocation sites); all other
// values are loaded from the boilerplate and stored directly.
void HOptimizedGraphBuilder::BuildEmitFixedArray(
    Handle<FixedArrayBase> elements,
    ElementsKind kind,
    HValue* object_elements,
    AllocationSiteUsageContext* site_context) {
  HInstruction* boilerplate_elements = Add<HConstant>(elements);
  int elements_length = elements->length();
  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
  for (int i = 0; i < elements_length; i++) {
    Handle<Object> value(fast_elements->get(i), isolate());
    HValue* key_constant = Add<HConstant>(i);
    if (value->IsJSObject()) {
      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
      Handle<AllocationSite> current_site = site_context->EnterNewScope();
      HInstruction* result =
          BuildFastLiteral(value_object, site_context);
      site_context->ExitScope(current_site, value_object);
      Add<HStoreKeyed>(object_elements, key_constant, result, kind);
    } else {
      HInstruction* value_instruction =
          Add<HLoadKeyed>(boilerplate_elements, key_constant,
                          static_cast<HValue*>(NULL), kind,
                          ALLOW_RETURN_HOLE);
      Add<HStoreKeyed>(object_elements, key_constant, value_instruction, kind);
    }
  }
}
9589
9590
// AST visitor for a ThisFunction node: delegate to BuildThisFunction and
// return the resulting instruction to the surrounding context.
void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
  ASSERT(!HasStackOverflow());
  ASSERT(current_block() != NULL);
  ASSERT(current_block()->HasPredecessor());
  HInstruction* instr = BuildThisFunction();
  return ast_context()->ReturnInstruction(instr, expr->id());
}
9598
9599
// Visit all declarations in the scope.  Global declarations are collected
// into the globals_ list by the individual visitors and then declared in a
// single batched HDeclareGlobals instruction.
void HOptimizedGraphBuilder::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  ASSERT(globals_.is_empty());
  AstVisitor::VisitDeclarations(declarations);
  if (!globals_.is_empty()) {
    // Pack the collected name/value pairs into a fixed array for the
    // runtime call.
    Handle<FixedArray> array =
       isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
    for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
    int flags = DeclareGlobalsEvalFlag::encode(current_info()->is_eval()) |
        DeclareGlobalsNativeFlag::encode(current_info()->is_native()) |
        DeclareGlobalsLanguageMode::encode(current_info()->language_mode());
    Add<HDeclareGlobals>(array, flags);
    globals_.Clear();
  }
}
9615
9616
// Compile a variable declaration.  Unallocated (global) variables are
// queued for the batched HDeclareGlobals; stack-allocated and context
// variables that need hole initialization (const/let) are bound/stored
// with the hole constant; lookup-slot declarations bail out.
void HOptimizedGraphBuilder::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // Queue name/initial-value pair for HDeclareGlobals.
      globals_.Add(variable->name(), zone());
      globals_.Add(variable->binding_needs_init()
                       ? isolate()->factory()->the_hole_value()
                       : isolate()->factory()->undefined_value(), zone());
      return;
    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        environment()->Bind(variable, value);
      }
      break;
    case Variable::CONTEXT:
      if (hole_init) {
        HValue* value = graph()->GetConstantHole();
        HValue* context = environment()->context();
        HStoreContextSlot* store = Add<HStoreContextSlot>(
            context, variable->index(), HStoreContextSlot::kNoCheck, value);
        // A simulate is needed when the store is observable so deopt can
        // resume after it.
        if (store->HasObservableSideEffects()) {
          Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
        }
      }
      break;
    case Variable::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
9652
9653
// Compile a function declaration.  Globals queue a SharedFunctionInfo for
// the batched HDeclareGlobals; locals/parameters bind the closure value;
// context variables store it into the context slot; lookup slots bail out.
void HOptimizedGraphBuilder::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_.Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function = Compiler::BuildFunctionInfo(
          declaration->fun(), current_info()->script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_.Add(function, zone());
      return;
    }
    case Variable::PARAMETER:
    case Variable::LOCAL: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      BindIfLive(variable, value);
      break;
    }
    case Variable::CONTEXT: {
      CHECK_ALIVE(VisitForValue(declaration->fun()));
      HValue* value = Pop();
      HValue* context = environment()->context();
      HStoreContextSlot* store = Add<HStoreContextSlot>(
          context, variable->index(), HStoreContextSlot::kNoCheck, value);
      // A simulate is needed when the store is observable so deopt can
      // resume after it.
      if (store->HasObservableSideEffects()) {
        Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
      }
      break;
    }
    case Variable::LOOKUP:
      return Bailout(kUnsupportedLookupSlotInDeclaration);
  }
}
9690
9691
// Module AST nodes are never compiled by this optimizing builder; reaching
// this visitor indicates a bug.
void HOptimizedGraphBuilder::VisitModuleDeclaration(
    ModuleDeclaration* declaration) {
  UNREACHABLE();
}
9696
9697
// Module AST nodes are never compiled by this optimizing builder; reaching
// this visitor indicates a bug.
void HOptimizedGraphBuilder::VisitImportDeclaration(
    ImportDeclaration* declaration) {
  UNREACHABLE();
}
9702
9703
// Module AST nodes are never compiled by this optimizing builder; reaching
// this visitor indicates a bug.
void HOptimizedGraphBuilder::VisitExportDeclaration(
    ExportDeclaration* declaration) {
  UNREACHABLE();
}
9708
9709
// Module AST nodes are never compiled by this optimizing builder; reaching
// this visitor indicates a bug.
void HOptimizedGraphBuilder::VisitModuleLiteral(ModuleLiteral* module) {
  UNREACHABLE();
}
9713
9714
// Module AST nodes are never compiled by this optimizing builder; reaching
// this visitor indicates a bug.
void HOptimizedGraphBuilder::VisitModuleVariable(ModuleVariable* module) {
  UNREACHABLE();
}
9718
9719
// Module paths never reach the optimizing compiler.
void HOptimizedGraphBuilder::VisitModulePath(ModulePath* module) {
  UNREACHABLE();
}
9723
9724
// Module URLs never reach the optimizing compiler.
void HOptimizedGraphBuilder::VisitModuleUrl(ModuleUrl* module) {
  UNREACHABLE();
}
9728
9729
// Module statements never reach the optimizing compiler.
void HOptimizedGraphBuilder::VisitModuleStatement(ModuleStatement* stmt) {
  UNREACHABLE();
}
9733
9734
9735// Generators for inline runtime functions.
9736// Support for types.
9737void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
9738  ASSERT(call->arguments()->length() == 1);
9739  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9740  HValue* value = Pop();
9741  HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
9742  return ast_context()->ReturnControl(result, call->id());
9743}
9744
9745
9746void HOptimizedGraphBuilder::GenerateIsSpecObject(CallRuntime* call) {
9747  ASSERT(call->arguments()->length() == 1);
9748  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9749  HValue* value = Pop();
9750  HHasInstanceTypeAndBranch* result =
9751      New<HHasInstanceTypeAndBranch>(value,
9752                                     FIRST_SPEC_OBJECT_TYPE,
9753                                     LAST_SPEC_OBJECT_TYPE);
9754  return ast_context()->ReturnControl(result, call->id());
9755}
9756
9757
9758void HOptimizedGraphBuilder::GenerateIsFunction(CallRuntime* call) {
9759  ASSERT(call->arguments()->length() == 1);
9760  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9761  HValue* value = Pop();
9762  HHasInstanceTypeAndBranch* result =
9763      New<HHasInstanceTypeAndBranch>(value, JS_FUNCTION_TYPE);
9764  return ast_context()->ReturnControl(result, call->id());
9765}
9766
9767
9768void HOptimizedGraphBuilder::GenerateIsMinusZero(CallRuntime* call) {
9769  ASSERT(call->arguments()->length() == 1);
9770  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9771  HValue* value = Pop();
9772  HCompareMinusZeroAndBranch* result = New<HCompareMinusZeroAndBranch>(value);
9773  return ast_context()->ReturnControl(result, call->id());
9774}
9775
9776
9777void HOptimizedGraphBuilder::GenerateHasCachedArrayIndex(CallRuntime* call) {
9778  ASSERT(call->arguments()->length() == 1);
9779  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9780  HValue* value = Pop();
9781  HHasCachedArrayIndexAndBranch* result =
9782      New<HHasCachedArrayIndexAndBranch>(value);
9783  return ast_context()->ReturnControl(result, call->id());
9784}
9785
9786
9787void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
9788  ASSERT(call->arguments()->length() == 1);
9789  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9790  HValue* value = Pop();
9791  HHasInstanceTypeAndBranch* result =
9792      New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
9793  return ast_context()->ReturnControl(result, call->id());
9794}
9795
9796
9797void HOptimizedGraphBuilder::GenerateIsRegExp(CallRuntime* call) {
9798  ASSERT(call->arguments()->length() == 1);
9799  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9800  HValue* value = Pop();
9801  HHasInstanceTypeAndBranch* result =
9802      New<HHasInstanceTypeAndBranch>(value, JS_REGEXP_TYPE);
9803  return ast_context()->ReturnControl(result, call->id());
9804}
9805
9806
9807void HOptimizedGraphBuilder::GenerateIsObject(CallRuntime* call) {
9808  ASSERT(call->arguments()->length() == 1);
9809  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9810  HValue* value = Pop();
9811  HIsObjectAndBranch* result = New<HIsObjectAndBranch>(value);
9812  return ast_context()->ReturnControl(result, call->id());
9813}
9814
9815
// %_IsNonNegativeSmi has no Hydrogen implementation; force a bailout.
void HOptimizedGraphBuilder::GenerateIsNonNegativeSmi(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsNonNegativeSmi);
}
9819
9820
9821void HOptimizedGraphBuilder::GenerateIsUndetectableObject(CallRuntime* call) {
9822  ASSERT(call->arguments()->length() == 1);
9823  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9824  HValue* value = Pop();
9825  HIsUndetectableAndBranch* result = New<HIsUndetectableAndBranch>(value);
9826  return ast_context()->ReturnControl(result, call->id());
9827}
9828
9829
// No Hydrogen implementation for this intrinsic; force a bailout.
void HOptimizedGraphBuilder::GenerateIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf);
}
9834
9835
9836// Support for construct call checks.
void HOptimizedGraphBuilder::GenerateIsConstructCall(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 0);
  if (function_state()->outer() != NULL) {
    // We are generating graph for inlined function.  Whether an inlined
    // frame is a construct call is known statically from the inlining kind,
    // so the answer folds to a constant.
    HValue* value = function_state()->inlining_kind() == CONSTRUCT_CALL_RETURN
        ? graph()->GetConstantTrue()
        : graph()->GetConstantFalse();
    return ast_context()->ReturnValue(value);
  } else {
    // For the outermost frame the answer is only known at run time.
    return ast_context()->ReturnControl(New<HIsConstructCallAndBranch>(),
                                        call->id());
  }
}
9850
9851
9852// Support for arguments.length and arguments[?].
void HOptimizedGraphBuilder::GenerateArgumentsLength(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 0);
  // Materialize the arguments backing store, then read its length.
  HInstruction* elements = Add<HArgumentsElements>(false);
  HArgumentsLength* result = New<HArgumentsLength>(elements);
  return ast_context()->ReturnInstruction(result, call->id());
}
9863
9864
void HOptimizedGraphBuilder::GenerateArguments(CallRuntime* call) {
  // Our implementation of arguments (based on this stack frame or an
  // adapter below it) does not work for inlined functions.  This runtime
  // function is blacklisted by AstNode::IsInlineable.
  ASSERT(function_state()->outer() == NULL);
  ASSERT(call->arguments()->length() == 1);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* index = Pop();
  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* length = Add<HArgumentsLength>(elements);
  // Bounds-check the index against the actual argument count before
  // emitting the access.
  HInstruction* checked_index = Add<HBoundsCheck>(index, length);
  HAccessArgumentsAt* result = New<HAccessArgumentsAt>(
      elements, length, checked_index);
  return ast_context()->ReturnInstruction(result, call->id());
}
9880
9881
9882// Support for accessing the class and value fields of an object.
void HOptimizedGraphBuilder::GenerateClassOf(CallRuntime* call) {
  // The special form detected by IsClassOfTest is detected before we get here
  // and does not cause a bailout.  A free-standing %_ClassOf has no Hydrogen
  // implementation, so bail out.
  return Bailout(kInlinedRuntimeFunctionClassOf);
}
9888
9889
9890void HOptimizedGraphBuilder::GenerateValueOf(CallRuntime* call) {
9891  ASSERT(call->arguments()->length() == 1);
9892  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9893  HValue* value = Pop();
9894  HValueOf* result = New<HValueOf>(value);
9895  return ast_context()->ReturnInstruction(result, call->id());
9896}
9897
9898
void HOptimizedGraphBuilder::GenerateDateField(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  ASSERT_NE(NULL, call->arguments()->at(1)->AsLiteral());
  // The field index must be a compile-time Smi literal; only the date
  // object itself is evaluated at run time.
  Smi* index = Smi::cast(*(call->arguments()->at(1)->AsLiteral()->value()));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* date = Pop();
  HDateField* result = New<HDateField>(date, index);
  return ast_context()->ReturnInstruction(result, call->id());
}
9908
9909
// Inline %_OneByteSeqStringSetChar: write a char code into a sequential
// one-byte string in place.  The intrinsic has no value; it returns
// undefined after a fixed simulate that records the side effect.
void HOptimizedGraphBuilder::GenerateOneByteSeqStringSetChar(
    CallRuntime* call) {
  ASSERT(call->arguments()->length() == 3);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Arguments were pushed (string, index, value); pop in reverse order.
  HValue* value = Pop();
  HValue* index = Pop();
  HValue* string = Pop();
  Add<HSeqStringSetChar>(String::ONE_BYTE_ENCODING, string,
                         index, value);
  Add<HSimulate>(call->id(), FIXED_SIMULATE);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
9924
9925
// Inline %_TwoByteSeqStringSetChar: two-byte-encoding twin of
// GenerateOneByteSeqStringSetChar; writes a char code in place and
// returns undefined.
void HOptimizedGraphBuilder::GenerateTwoByteSeqStringSetChar(
    CallRuntime* call) {
  ASSERT(call->arguments()->length() == 3);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
  // Arguments were pushed (string, index, value); pop in reverse order.
  HValue* value = Pop();
  HValue* index = Pop();
  HValue* string = Pop();
  Add<HSeqStringSetChar>(String::TWO_BYTE_ENCODING, string,
                         index, value);
  Add<HSimulate>(call->id(), FIXED_SIMULATE);
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
9940
9941
// Inline %_SetValueOf(object, value): if object is a JSValue wrapper,
// store value into its value field; smis and non-JSValue objects are left
// untouched.  Always evaluates to the value argument.
void HOptimizedGraphBuilder::GenerateSetValueOf(CallRuntime* call) {
  ASSERT(call->arguments()->length() == 2);
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
  HValue* value = Pop();
  HValue* object = Pop();
  // Check whether the object is a smi; a smi has no value field to set.
  HBasicBlock* if_smi = graph()->CreateBasicBlock();
  HBasicBlock* if_heap_object = graph()->CreateBasicBlock();
  HBasicBlock* join = graph()->CreateBasicBlock();
  FinishCurrentBlock(New<HIsSmiAndBranch>(object, if_smi, if_heap_object));
  Goto(if_smi, join);

  // Check if object is a JSValue.
  set_current_block(if_heap_object);
  HHasInstanceTypeAndBranch* typecheck =
      New<HHasInstanceTypeAndBranch>(object, JS_VALUE_TYPE);
  HBasicBlock* if_js_value = graph()->CreateBasicBlock();
  HBasicBlock* not_js_value = graph()->CreateBasicBlock();
  typecheck->SetSuccessorAt(0, if_js_value);
  typecheck->SetSuccessorAt(1, not_js_value);
  FinishCurrentBlock(typecheck);
  Goto(not_js_value, join);

  // Create in-object property store to kValueOffset.
  set_current_block(if_js_value);
  Add<HStoreNamedField>(object,
      HObjectAccess::ForJSObjectOffset(JSValue::kValueOffset), value);
  Goto(if_js_value, join);
  join->SetJoinId(call->id());
  set_current_block(join);
  // All three paths fall through to the value itself.
  return ast_context()->ReturnValue(value);
}
9975
9976
9977// Fast support for charCodeAt(n).
9978void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
9979  ASSERT(call->arguments()->length() == 2);
9980  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9981  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
9982  HValue* index = Pop();
9983  HValue* string = Pop();
9984  HInstruction* result = BuildStringCharCodeAt(string, index);
9985  return ast_context()->ReturnInstruction(result, call->id());
9986}
9987
9988
9989// Fast support for string.charAt(n) and string[n].
9990void HOptimizedGraphBuilder::GenerateStringCharFromCode(CallRuntime* call) {
9991  ASSERT(call->arguments()->length() == 1);
9992  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
9993  HValue* char_code = Pop();
9994  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
9995  return ast_context()->ReturnInstruction(result, call->id());
9996}
9997
9998
9999// Fast support for string.charAt(n) and string[n].
10000void HOptimizedGraphBuilder::GenerateStringCharAt(CallRuntime* call) {
10001  ASSERT(call->arguments()->length() == 2);
10002  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10003  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10004  HValue* index = Pop();
10005  HValue* string = Pop();
10006  HInstruction* char_code = BuildStringCharCodeAt(string, index);
10007  AddInstruction(char_code);
10008  HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
10009  return ast_context()->ReturnInstruction(result, call->id());
10010}
10011
10012
10013// Fast support for object equality testing.
10014void HOptimizedGraphBuilder::GenerateObjectEquals(CallRuntime* call) {
10015  ASSERT(call->arguments()->length() == 2);
10016  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10017  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10018  HValue* right = Pop();
10019  HValue* left = Pop();
10020  HCompareObjectEqAndBranch* result =
10021      New<HCompareObjectEqAndBranch>(left, right);
10022  return ast_context()->ReturnControl(result, call->id());
10023}
10024
10025
void HOptimizedGraphBuilder::GenerateLog(CallRuntime* call) {
  // %_Log is ignored in optimized code; it simply evaluates to undefined.
  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
}
10030
10031
10032// Fast support for StringAdd.
10033void HOptimizedGraphBuilder::GenerateStringAdd(CallRuntime* call) {
10034  ASSERT_EQ(2, call->arguments()->length());
10035  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10036  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10037  HValue* right = Pop();
10038  HValue* left = Pop();
10039  HInstruction* result =
10040      NewUncasted<HStringAdd>(left, right, STRING_ADD_CHECK_BOTH);
10041  return ast_context()->ReturnInstruction(result, call->id());
10042}
10043
10044
10045// Fast support for SubString.
// Lowered to a call of the SubString code stub; the three arguments are
// pushed as outgoing arguments and dropped from the environment afterwards.
void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
  ASSERT_EQ(3, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HCallStub* result = New<HCallStub>(CodeStub::SubString, 3);
  Drop(3);
  return ast_context()->ReturnInstruction(result, call->id());
}
10053
10054
10055// Fast support for StringCompare.
// Lowered to a call of the StringCompare code stub.
void HOptimizedGraphBuilder::GenerateStringCompare(CallRuntime* call) {
  ASSERT_EQ(2, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HCallStub* result = New<HCallStub>(CodeStub::StringCompare, 2);
  Drop(2);
  return ast_context()->ReturnInstruction(result, call->id());
}
10063
10064
10065// Support for direct calls from JavaScript to native RegExp code.
// Lowered to a call of the RegExpExec code stub (direct call into native
// RegExp code).
void HOptimizedGraphBuilder::GenerateRegExpExec(CallRuntime* call) {
  ASSERT_EQ(4, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HCallStub* result = New<HCallStub>(CodeStub::RegExpExec, 4);
  Drop(4);
  return ast_context()->ReturnInstruction(result, call->id());
}
10073
10074
10075// Construct a RegExp exec result with two in-object properties.
// Lowered to a call of the RegExpConstructResult code stub.
void HOptimizedGraphBuilder::GenerateRegExpConstructResult(CallRuntime* call) {
  ASSERT_EQ(3, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HCallStub* result = New<HCallStub>(CodeStub::RegExpConstructResult, 3);
  Drop(3);
  return ast_context()->ReturnInstruction(result, call->id());
}
10083
10084
10085// Support for fast native caches.
// %_GetFromCache has no Hydrogen implementation; force a bailout.
void HOptimizedGraphBuilder::GenerateGetFromCache(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGetFromCache);
}
10089
10090
10091// Fast support for number to string.
void HOptimizedGraphBuilder::GenerateNumberToString(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
  HValue* number = Pop();
  // The argument is known to be a number by the intrinsic's contract, so
  // build the conversion with the Number type hint.
  HValue* result = BuildNumberToString(
      number, handle(Type::Number(), isolate()));
  return ast_context()->ReturnValue(result);
}
10100
10101
10102// Fast call for custom callbacks.
// Inline %_CallFunction(receiver, ..., function): push the arguments, then
// dispatch on whether the callee is an actual JSFunction.  JSFunctions are
// invoked directly; proxies and other callables go through the generic
// CallFunction path.  Both arms push their result and meet at a join.
void HOptimizedGraphBuilder::GenerateCallFunction(CallRuntime* call) {
  // 1 ~ The function to call is not itself an argument to the call.
  int arg_count = call->arguments()->length() - 1;
  ASSERT(arg_count >= 1);  // There's always at least a receiver.

  for (int i = 0; i < arg_count; ++i) {
    CHECK_ALIVE(VisitArgument(call->arguments()->at(i)));
  }
  // The last argument is the callee itself.
  CHECK_ALIVE(VisitForValue(call->arguments()->last()));

  HValue* function = Pop();

  // Branch for function proxies, or other non-functions.
  HHasInstanceTypeAndBranch* typecheck =
      New<HHasInstanceTypeAndBranch>(function, JS_FUNCTION_TYPE);
  HBasicBlock* if_jsfunction = graph()->CreateBasicBlock();
  HBasicBlock* if_nonfunction = graph()->CreateBasicBlock();
  HBasicBlock* join = graph()->CreateBasicBlock();
  typecheck->SetSuccessorAt(0, if_jsfunction);
  typecheck->SetSuccessorAt(1, if_nonfunction);
  FinishCurrentBlock(typecheck);

  // Fast path: a real JSFunction can be invoked directly.
  set_current_block(if_jsfunction);
  HInstruction* invoke_result = Add<HInvokeFunction>(function, arg_count);
  Drop(arg_count);
  Push(invoke_result);
  Goto(if_jsfunction, join);

  // Slow path: proxies and other callables use the generic call machinery.
  set_current_block(if_nonfunction);
  HInstruction* call_result = Add<HCallFunction>(function, arg_count);
  Drop(arg_count);
  Push(call_result);
  Goto(if_nonfunction, join);

  set_current_block(join);
  join->SetJoinId(call->id());
  return ast_context()->ReturnValue(Pop());
}
10141
10142
10143// Fast call to math functions.
10144void HOptimizedGraphBuilder::GenerateMathPow(CallRuntime* call) {
10145  ASSERT_EQ(2, call->arguments()->length());
10146  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10147  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
10148  HValue* right = Pop();
10149  HValue* left = Pop();
10150  HInstruction* result = NewUncasted<HPower>(left, right);
10151  return ast_context()->ReturnInstruction(result, call->id());
10152}
10153
10154
// Lowered to a call of the TranscendentalCache stub configured for LOG.
void HOptimizedGraphBuilder::GenerateMathLog(CallRuntime* call) {
  ASSERT_EQ(1, call->arguments()->length());
  CHECK_ALIVE(VisitArgumentList(call->arguments()));
  HCallStub* result = New<HCallStub>(CodeStub::TranscendentalCache, 1);
  result->set_transcendental_type(TranscendentalCache::LOG);
  Drop(1);
  return ast_context()->ReturnInstruction(result, call->id());
}
10163
10164
10165void HOptimizedGraphBuilder::GenerateMathSqrt(CallRuntime* call) {
10166  ASSERT(call->arguments()->length() == 1);
10167  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10168  HValue* value = Pop();
10169  HInstruction* result = NewUncasted<HUnaryMathOperation>(value, kMathSqrt);
10170  return ast_context()->ReturnInstruction(result, call->id());
10171}
10172
10173
10174// Check whether two RegExps are equivalent
// %_IsRegExpEquivalent has no Hydrogen implementation; force a bailout.
void HOptimizedGraphBuilder::GenerateIsRegExpEquivalent(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionIsRegExpEquivalent);
}
10178
10179
10180void HOptimizedGraphBuilder::GenerateGetCachedArrayIndex(CallRuntime* call) {
10181  ASSERT(call->arguments()->length() == 1);
10182  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
10183  HValue* value = Pop();
10184  HGetCachedArrayIndex* result = New<HGetCachedArrayIndex>(value);
10185  return ast_context()->ReturnInstruction(result, call->id());
10186}
10187
10188
// %_FastAsciiArrayJoin has no Hydrogen implementation; force a bailout.
void HOptimizedGraphBuilder::GenerateFastAsciiArrayJoin(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionFastAsciiArrayJoin);
}
10192
10193
10194// Support for generators.
// Generator resumption is not supported in optimized code; force a bailout.
void HOptimizedGraphBuilder::GenerateGeneratorNext(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGeneratorNext);
}
10198
10199
// Throwing into a generator is not supported in optimized code; bail out.
void HOptimizedGraphBuilder::GenerateGeneratorThrow(CallRuntime* call) {
  return Bailout(kInlinedRuntimeFunctionGeneratorThrow);
}
10203
10204
// Emits a debug-break instruction and evaluates to the constant 0.
void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
    CallRuntime* call) {
  Add<HDebugBreak>();
  return ast_context()->ReturnValue(graph()->GetConstant0());
}
10210
10211
10212#undef CHECK_BAILOUT
10213#undef CHECK_ALIVE
10214
10215
// Constructs a JS_FUNCTION environment for the given scope and closure.
// Slot layout: receiver + parameters (num_parameters() + 1), one special
// slot (specials_count_ == 1), then the scope's stack-allocated locals.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Scope* scope,
                           Handle<JSFunction> closure,
                           Zone* zone)
    : closure_(closure),
      values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(1),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(scope->num_parameters() + 1, scope->num_stack_slots(), 0);
}
10234
10235
// Constructs a STUB-frame environment: parameters plus one special slot,
// no locals, no closure, no outer environment.
HEnvironment::HEnvironment(Zone* zone, int parameter_count)
    : values_(0, zone),
      frame_type_(STUB),
      parameter_count_(parameter_count),
      specials_count_(1),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
  Initialize(parameter_count, 0, 0);
}
10250
10251
// Copy constructor used by Copy(): the member initializers are placeholders
// that Initialize(other) immediately overwrites field by field (including a
// deep copy of the outer chain).
HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
    : values_(0, zone),
      frame_type_(JS_FUNCTION),
      parameter_count_(0),
      specials_count_(0),
      local_count_(0),
      outer_(NULL),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(other->ast_id()),
      zone_(zone) {
  Initialize(other);
}
10266
10267
// Constructs an artificial environment (adaptor/construct/getter/setter
// frames, see CreateStubEnvironment) that holds only |arguments| parameter
// slots; values are pushed in by the caller afterwards.
HEnvironment::HEnvironment(HEnvironment* outer,
                           Handle<JSFunction> closure,
                           FrameType frame_type,
                           int arguments,
                           Zone* zone)
    : closure_(closure),
      values_(arguments, zone),
      frame_type_(frame_type),
      parameter_count_(arguments),
      specials_count_(0),
      local_count_(0),
      outer_(outer),
      entry_(NULL),
      pop_count_(0),
      push_count_(0),
      ast_id_(BailoutId::None()),
      zone_(zone) {
}
10286
10287
// Sizes the environment and fills every slot (parameters, specials, locals
// and the initial expression-stack height) with NULL placeholders.
void HEnvironment::Initialize(int parameter_count,
                              int local_count,
                              int stack_height) {
  parameter_count_ = parameter_count;
  local_count_ = local_count;

  // Avoid reallocating the temporaries' backing store on the first Push.
  int total = parameter_count + specials_count_ + local_count + stack_height;
  values_.Initialize(total + 4, zone());
  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
}
10299
10300
// Copies all state from |other| into this environment.  The values list and
// assigned-variables set are copied element-wise; the outer environment
// chain is deep-copied so the two environments evolve independently.
void HEnvironment::Initialize(const HEnvironment* other) {
  closure_ = other->closure();
  values_.AddAll(other->values_, zone());
  assigned_variables_.Union(other->assigned_variables_, zone());
  frame_type_ = other->frame_type_;
  parameter_count_ = other->parameter_count_;
  local_count_ = other->local_count_;
  if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
  entry_ = other->entry_;
  pop_count_ = other->pop_count_;
  push_count_ = other->push_count_;
  specials_count_ = other->specials_count_;
  ast_id_ = other->ast_id_;
}
10315
10316
// Merges |other|'s values into this environment at a (non-loop) join block.
// Slots whose values agree are left alone; disagreeing slots get a phi in
// |block| fed by the old value (once per existing predecessor) plus the
// incoming value.  Slots that already hold a phi of this block just gain a
// new input.
void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
  ASSERT(!block->IsLoopHeader());
  ASSERT(values_.length() == other->values_.length());

  int length = values_.length();
  for (int i = 0; i < length; ++i) {
    HValue* value = values_[i];
    if (value != NULL && value->IsPhi() && value->block() == block) {
      // There is already a phi for the i'th value.
      HPhi* phi = HPhi::cast(value);
      // Assert index is correct and that we haven't missed an incoming edge.
      ASSERT(phi->merged_index() == i || !phi->HasMergedIndex());
      ASSERT(phi->OperandCount() == block->predecessors()->length());
      phi->AddInput(other->values_[i]);
    } else if (values_[i] != other->values_[i]) {
      // There is a fresh value on the incoming edge, a phi is needed.
      ASSERT(values_[i] != NULL && other->values_[i] != NULL);
      HPhi* phi = block->AddNewPhi(i);
      HValue* old_value = values_[i];
      // The old value was live on every previously-merged edge.
      for (int j = 0; j < block->predecessors()->length(); j++) {
        phi->AddInput(old_value);
      }
      phi->AddInput(other->values_[i]);
      this->values_[i] = phi;
    }
  }
}
10344
10345
// Binds |value| to slot |index| and records the slot as assigned (used when
// deciding which variables need merging at joins).
void HEnvironment::Bind(int index, HValue* value) {
  ASSERT(value != NULL);
  assigned_variables_.Add(index, zone());
  values_[index] = value;
}
10351
10352
10353bool HEnvironment::HasExpressionAt(int index) const {
10354  return index >= parameter_count_ + specials_count_ + local_count_;
10355}
10356
10357
10358bool HEnvironment::ExpressionStackIsEmpty() const {
10359  ASSERT(length() >= first_expression_index());
10360  return length() == first_expression_index();
10361}
10362
10363
// Overwrites the expression-stack slot |index_from_top| positions below the
// top.  Adjusts the push/pop history so the new value is recorded as having
// been pushed by this environment.
void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
  int count = index_from_top + 1;
  int index = values_.length() - count;
  ASSERT(HasExpressionAt(index));
  // The push count must include at least the element in question or else
  // the new value will not be included in this environment's history.
  if (push_count_ < count) {
    // This is the same effect as popping then re-pushing 'count' elements.
    pop_count_ += (count - push_count_);
    push_count_ = count;
  }
  values_[index] = value;
}
10377
10378
10379void HEnvironment::Drop(int count) {
10380  for (int i = 0; i < count; ++i) {
10381    Pop();
10382  }
10383}
10384
10385
// Returns a zone-allocated deep copy of this environment (including the
// outer chain, see Initialize(const HEnvironment*)).
HEnvironment* HEnvironment::Copy() const {
  return new(zone()) HEnvironment(this, zone());
}
10389
10390
10391HEnvironment* HEnvironment::CopyWithoutHistory() const {
10392  HEnvironment* result = Copy();
10393  result->ClearHistory();
10394  return result;
10395}
10396
10397
// Returns a copy suitable for a loop header: every slot is replaced by a
// fresh phi in |loop_header| seeded with the current value; back edges add
// their inputs later.  History is cleared.
HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
  HEnvironment* new_env = Copy();
  for (int i = 0; i < values_.length(); ++i) {
    HPhi* phi = loop_header->AddNewPhi(i);
    phi->AddInput(values_[i]);
    new_env->values_[i] = phi;
  }
  new_env->ClearHistory();
  return new_env;
}
10408
10409
// Builds an artificial environment of the given frame type holding the
// receiver plus |arguments| values, copied from this environment's
// expression stack (deepest first so the receiver ends up at slot 0).
HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
                                                  Handle<JSFunction> target,
                                                  FrameType frame_type,
                                                  int arguments) const {
  HEnvironment* new_env =
      new(zone()) HEnvironment(outer, target, frame_type,
                               arguments + 1, zone());
  for (int i = 0; i <= arguments; ++i) {  // Include receiver.
    new_env->Push(ExpressionStackAt(arguments - i));
  }
  new_env->ClearHistory();
  return new_env;
}
10423
10424
// Builds the environment chain for inlining |function| into the current
// frame: an outer copy with the call's arguments popped, optional artificial
// frames (construct/getter/setter stubs, arguments adaptor when arity and
// actual argument count differ), and an inner JS_FUNCTION environment whose
// parameter slots are filled from the call site (missing arguments become
// undefined).
HEnvironment* HEnvironment::CopyForInlining(
    Handle<JSFunction> target,
    int arguments,
    FunctionLiteral* function,
    HConstant* undefined,
    InliningKind inlining_kind,
    bool undefined_receiver) const {
  ASSERT(frame_type() == JS_FUNCTION);

  // Outer environment is a copy of this one without the arguments.
  int arity = function->scope()->num_parameters();

  HEnvironment* outer = Copy();
  outer->Drop(arguments + 1);  // Including receiver.
  outer->ClearHistory();

  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
    // Create artificial constructor stub environment.  The receiver should
    // actually be the constructor function, but we pass the newly allocated
    // object instead, DoComputeConstructStubFrame() relies on that.
    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
  } else if (inlining_kind == GETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for restoring the
    // correct context.
    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
  } else if (inlining_kind == SETTER_CALL_RETURN) {
    // We need an additional StackFrame::INTERNAL frame for temporarily saving
    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
  }

  if (arity != arguments) {
    // Create artificial arguments adaptation environment.
    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
  }

  HEnvironment* inner =
      new(zone()) HEnvironment(outer, function->scope(), target, zone());
  // Get the argument values from the original environment.
  for (int i = 0; i <= arity; ++i) {  // Include receiver.
    HValue* push = (i <= arguments) ?
        ExpressionStackAt(arguments - i) : undefined;
    inner->SetValueAt(i, push);
  }
  // If the function we are inlining is a strict mode function or a
  // builtin function, pass undefined as the receiver for function
  // calls (instead of the global receiver).
  if (undefined_receiver) {
    inner->SetValueAt(0, undefined);
  }
  // Slot arity + 1 is the context; remaining locals start out undefined.
  inner->SetValueAt(arity + 1, context());
  for (int i = arity + 2; i < inner->length(); ++i) {
    inner->SetValueAt(i, undefined);
  }

  inner->set_ast_id(BailoutId::FunctionEntry());
  return inner;
}
10483
10484
// Dumps every slot to |stream|, with section headers emitted at the start
// of the parameter, special, local, and expression regions.
void HEnvironment::PrintTo(StringStream* stream) {
  for (int i = 0; i < length(); i++) {
    if (i == 0) stream->Add("parameters\n");
    if (i == parameter_count()) stream->Add("specials\n");
    if (i == parameter_count() + specials_count()) stream->Add("locals\n");
    if (i == parameter_count() + specials_count() + local_count()) {
      stream->Add("expressions\n");
    }
    HValue* val = values_.at(i);
    stream->Add("%d: ", i);
    if (val != NULL) {
      val->PrintNameTo(stream);
    } else {
      stream->Add("NULL");
    }
    stream->Add("\n");
  }
  PrintF("\n");
}
10504
10505
// Convenience wrapper: renders PrintTo output into a heap-backed string
// stream and writes it to stdout.
void HEnvironment::PrintToStd() {
  HeapStringAllocator string_allocator;
  StringStream trace(&string_allocator);
  PrintTo(&trace);
  PrintF("%s", *trace.ToCString());
}
10512
10513
// Emits the "compilation" header of the trace file: function name for an
// optimizing compile, stub name otherwise, plus a timestamp.
void HTracer::TraceCompilation(CompilationInfo* info) {
  Tag tag(this, "compilation");
  if (info->IsOptimizing()) {
    Handle<String> name = info->function()->debug_name();
    PrintStringProperty("name", *name->ToCString());
    PrintStringProperty("method", *name->ToCString());
  } else {
    CodeStub::Major major_key = info->code_stub()->MajorKey();
    PrintStringProperty("name", CodeStub::MajorName(major_key, false));
    PrintStringProperty("method", "stub");
  }
  PrintLongProperty("date", static_cast<int64_t>(OS::TimeCurrentMillis()));
}
10527
10528
// Traces a Lithium chunk together with its Hydrogen graph.  Handle
// dereferences are explicitly allowed because tracing only happens on the
// main thread (no concurrent recompilation).
void HTracer::TraceLithium(const char* name, LChunk* chunk) {
  ASSERT(!chunk->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, chunk->graph(), chunk);
}
10535
10536
// Traces a Hydrogen graph without an associated Lithium chunk.  Same
// handle-dereference reasoning as TraceLithium.
void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
  ASSERT(!graph->isolate()->concurrent_recompilation_enabled());
  AllowHandleDereference allow_deref;
  AllowDeferredHandleDereference allow_deferred_deref;
  Trace(name, graph, NULL);
}
10543
10544
// Writes one "cfg" record describing every basic block of |graph|:
// predecessors/successors, dominator, loop depth, the phis and HIR
// instructions, and — when |chunk| is non-NULL — the matching LIR
// instructions.  The output follows the c1visualizer trace format
// (presumably; confirm against the consumer tool).
void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
  Tag tag(this, "cfg");
  PrintStringProperty("name", name);
  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
  for (int i = 0; i < blocks->length(); i++) {
    HBasicBlock* current = blocks->at(i);
    Tag block_tag(this, "block");
    PrintBlockProperty("name", current->block_id());
    // Bytecode indices are not tracked here; emit the -1 placeholders
    // the format expects.
    PrintIntProperty("from_bci", -1);
    PrintIntProperty("to_bci", -1);

    // Predecessor list, e.g.: predecessors "B1" "B2"
    if (!current->predecessors()->is_empty()) {
      PrintIndent();
      trace_.Add("predecessors");
      for (int j = 0; j < current->predecessors()->length(); ++j) {
        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
      }
      trace_.Add("\n");
    } else {
      PrintEmptyProperty("predecessors");
    }

    // Successor list, taken from the block's end (control) instruction.
    if (current->end()->SuccessorCount() == 0) {
      PrintEmptyProperty("successors");
    } else  {
      PrintIndent();
      trace_.Add("successors");
      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
        trace_.Add(" \"B%d\"", it.Current()->block_id());
      }
      trace_.Add("\n");
    }

    // No exception handlers are modeled; the property is always empty.
    PrintEmptyProperty("xhandlers");
    const char* flags = current->IsLoopSuccessorDominator()
        ? "dom-loop-succ"
        : "";
    PrintStringProperty("flags", flags);

    if (current->dominator() != NULL) {
      PrintBlockProperty("dominator", current->dominator()->block_id());
    }

    PrintIntProperty("loop_depth", current->LoopNestingDepth());

    // When Lithium has been generated, map the block onto its LIR
    // lifetime-position range.
    if (chunk != NULL) {
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      PrintIntProperty(
          "first_lir_id",
          LifetimePosition::FromInstructionIndex(first_index).Value());
      PrintIntProperty(
          "last_lir_id",
          LifetimePosition::FromInstructionIndex(last_index).Value());
    }

    // Phis are reported in the "states"/"locals" section, one line per
    // phi: "<merged_index> <name> <phi dump>".
    {
      Tag states_tag(this, "states");
      Tag locals_tag(this, "locals");
      int total = current->phis()->length();
      PrintIntProperty("size", current->phis()->length());
      PrintStringProperty("method", "None");
      for (int j = 0; j < total; ++j) {
        HPhi* phi = current->phis()->at(j);
        PrintIndent();
        trace_.Add("%d ", phi->merged_index());
        phi->PrintNameTo(&trace_);
        trace_.Add(" ");
        phi->PrintTo(&trace_);
        trace_.Add("\n");
      }
    }

    // HIR section: "<bci> <use count> <name> <instruction> <|@".
    {
      Tag HIR_tag(this, "HIR");
      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
        HInstruction* instruction = it.Current();
        // Source positions are only emitted when the flag is on and the
        // instruction actually carries one; otherwise report 0.
        int bci = FLAG_emit_opt_code_positions && instruction->has_position() ?
            instruction->position() : 0;
        int uses = instruction->UseCount();
        PrintIndent();
        trace_.Add("%d %d ", bci, uses);
        instruction->PrintNameTo(&trace_);
        trace_.Add(" ");
        instruction->PrintTo(&trace_);
        trace_.Add(" <|@\n");
      }
    }


    // LIR section (only with a chunk): one line per Lithium instruction,
    // tagged with its lifetime position and originating HIR value.
    if (chunk != NULL) {
      Tag LIR_tag(this, "LIR");
      int first_index = current->first_instruction_index();
      int last_index = current->last_instruction_index();
      if (first_index != -1 && last_index != -1) {
        const ZoneList<LInstruction*>* instructions = chunk->instructions();
        for (int i = first_index; i <= last_index; ++i) {
          LInstruction* linstr = instructions->at(i);
          if (linstr != NULL) {
            PrintIndent();
            trace_.Add("%d ",
                       LifetimePosition::FromInstructionIndex(i).Value());
            linstr->PrintTo(&trace_);
            trace_.Add(" [hir:");
            linstr->hydrogen_value()->PrintNameTo(&trace_);
            trace_.Add("]");
            trace_.Add(" <|@\n");
          }
        }
      }
    }
  }
}
10658
10659
10660void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
10661  Tag tag(this, "intervals");
10662  PrintStringProperty("name", name);
10663
10664  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
10665  for (int i = 0; i < fixed_d->length(); ++i) {
10666    TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
10667  }
10668
10669  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
10670  for (int i = 0; i < fixed->length(); ++i) {
10671    TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
10672  }
10673
10674  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
10675  for (int i = 0; i < live_ranges->length(); ++i) {
10676    TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
10677  }
10678}
10679
10680
// Emits one trace line for a single live range: its id and type, the
// assigned register or spill slot (if any), parent and hint indices,
// the covered use intervals, and the beneficial use positions.
// NULL or empty ranges are silently skipped.
void HTracer::TraceLiveRange(LiveRange* range, const char* type,
                             Zone* zone) {
  if (range != NULL && !range->IsEmpty()) {
    PrintIndent();
    trace_.Add("%d %s", range->id(), type);
    if (range->HasRegisterAssigned()) {
      // Registered ranges print the allocated register's name.
      LOperand* op = range->CreateAssignedOperand(zone);
      int assigned_reg = op->index();
      if (op->IsDoubleRegister()) {
        trace_.Add(" \"%s\"",
                   DoubleRegister::AllocationIndexToString(assigned_reg));
      } else {
        ASSERT(op->IsRegister());
        trace_.Add(" \"%s\"", Register::AllocationIndexToString(assigned_reg));
      }
    } else if (range->IsSpilled()) {
      // Spilled ranges print their stack slot instead; the spill
      // operand lives on the top-level range of the split tree.
      LOperand* op = range->TopLevel()->GetSpillOperand();
      if (op->IsDoubleStackSlot()) {
        trace_.Add(" \"double_stack:%d\"", op->index());
      } else {
        ASSERT(op->IsStackSlot());
        trace_.Add(" \"stack:%d\"", op->index());
      }
    }
    // Child ranges (splits) point back at their parent; top-level
    // ranges are their own parent.
    int parent_index = -1;
    if (range->IsChild()) {
      parent_index = range->parent()->id();
    } else {
      parent_index = range->id();
    }
    // The hint is the virtual register of the first unallocated use,
    // or -1 when no such hint exists.
    LOperand* op = range->FirstHint();
    int hint_index = -1;
    if (op != NULL && op->IsUnallocated()) {
      hint_index = LUnallocated::cast(op)->virtual_register();
    }
    trace_.Add(" %d %d", parent_index, hint_index);
    // Each covered interval prints as a half-open range "[start, end[".
    UseInterval* cur_interval = range->first_interval();
    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
      trace_.Add(" [%d, %d[",
                 cur_interval->start().Value(),
                 cur_interval->end().Value());
      cur_interval = cur_interval->next();
    }

    // Use positions where a register is beneficial (or all of them
    // under --trace-all-uses) print as "<pos> M".
    UsePosition* current_pos = range->first_pos();
    while (current_pos != NULL) {
      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
        trace_.Add(" %d M", current_pos->pos().Value());
      }
      current_pos = current_pos->next();
    }

    trace_.Add(" \"\"\n");
  }
}
10736
10737
// Appends the buffered trace text to the trace file (no truncation)
// and resets the in-memory buffer for the next trace section.
void HTracer::FlushToFile() {
  AppendChars(filename_.start(), *trace_.ToCString(), trace_.length(), false);
  trace_.Reset();
}
10742
10743
10744void HStatistics::Initialize(CompilationInfo* info) {
10745  if (info->shared_info().is_null()) return;
10746  source_size_ += info->shared_info()->SourceSize();
10747}
10748
10749
10750void HStatistics::Print() {
10751  PrintF("Timing results:\n");
10752  TimeDelta sum;
10753  for (int i = 0; i < times_.length(); ++i) {
10754    sum += times_[i];
10755  }
10756
10757  for (int i = 0; i < names_.length(); ++i) {
10758    PrintF("%32s", names_[i]);
10759    double ms = times_[i].InMillisecondsF();
10760    double percent = times_[i].PercentOf(sum);
10761    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
10762
10763    unsigned size = sizes_[i];
10764    double size_percent = static_cast<double>(size) * 100 / total_size_;
10765    PrintF(" %9u bytes / %4.1f %%\n", size, size_percent);
10766  }
10767
10768  PrintF("----------------------------------------"
10769         "---------------------------------------\n");
10770  TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
10771  PrintF("%32s %8.3f ms / %4.1f %% \n",
10772         "Create graph",
10773         create_graph_.InMillisecondsF(),
10774         create_graph_.PercentOf(total));
10775  PrintF("%32s %8.3f ms / %4.1f %% \n",
10776         "Optimize graph",
10777         optimize_graph_.InMillisecondsF(),
10778         optimize_graph_.PercentOf(total));
10779  PrintF("%32s %8.3f ms / %4.1f %% \n",
10780         "Generate and install code",
10781         generate_code_.InMillisecondsF(),
10782         generate_code_.PercentOf(total));
10783  PrintF("----------------------------------------"
10784         "---------------------------------------\n");
10785  PrintF("%32s %8.3f ms (%.1f times slower than full code gen)\n",
10786         "Total",
10787         total.InMillisecondsF(),
10788         total.TimesOf(full_code_gen_));
10789
10790  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
10791  double normalized_time =  source_size_in_kb > 0
10792      ? total.InMillisecondsF() / source_size_in_kb
10793      : 0;
10794  double normalized_size_in_kb = source_size_in_kb > 0
10795      ? total_size_ / 1024 / source_size_in_kb
10796      : 0;
10797  PrintF("%32s %8.3f ms           %7.3f kB allocated\n",
10798         "Average per kB source",
10799         normalized_time, normalized_size_in_kb);
10800}
10801
10802
10803void HStatistics::SaveTiming(const char* name, TimeDelta time, unsigned size) {
10804  total_size_ += size;
10805  for (int i = 0; i < names_.length(); ++i) {
10806    if (strcmp(names_[i], name) == 0) {
10807      times_[i] += time;
10808      sizes_[i] += size;
10809      return;
10810    }
10811  }
10812  names_.Add(name);
10813  times_.Add(time);
10814  sizes_.Add(size);
10815}
10816
10817
// On phase exit: dump the graph to the hydrogen tracer if tracing is
// enabled, and (in debug builds) verify graph invariants.
HPhase::~HPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
  }

#ifdef DEBUG
  graph_->Verify(false);  // No full verify.
#endif
}
10827
10828} }  // namespace v8::internal
10829