1// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/crankshaft/hydrogen.h"
6
7#include <memory>
8#include <sstream>
9
10#include "src/allocation-site-scopes.h"
11#include "src/ast/ast-numbering.h"
12#include "src/ast/compile-time-value.h"
13#include "src/ast/scopes.h"
14#include "src/code-factory.h"
15#include "src/crankshaft/hydrogen-bce.h"
16#include "src/crankshaft/hydrogen-canonicalize.h"
17#include "src/crankshaft/hydrogen-check-elimination.h"
18#include "src/crankshaft/hydrogen-dce.h"
19#include "src/crankshaft/hydrogen-dehoist.h"
20#include "src/crankshaft/hydrogen-environment-liveness.h"
21#include "src/crankshaft/hydrogen-escape-analysis.h"
22#include "src/crankshaft/hydrogen-gvn.h"
23#include "src/crankshaft/hydrogen-infer-representation.h"
24#include "src/crankshaft/hydrogen-infer-types.h"
25#include "src/crankshaft/hydrogen-load-elimination.h"
26#include "src/crankshaft/hydrogen-mark-unreachable.h"
27#include "src/crankshaft/hydrogen-osr.h"
28#include "src/crankshaft/hydrogen-range-analysis.h"
29#include "src/crankshaft/hydrogen-redundant-phi.h"
30#include "src/crankshaft/hydrogen-removable-simulates.h"
31#include "src/crankshaft/hydrogen-representation-changes.h"
32#include "src/crankshaft/hydrogen-sce.h"
33#include "src/crankshaft/hydrogen-store-elimination.h"
34#include "src/crankshaft/hydrogen-uint32-analysis.h"
35#include "src/crankshaft/lithium-allocator.h"
36#include "src/crankshaft/typing.h"
37#include "src/field-type.h"
38#include "src/full-codegen/full-codegen.h"
39#include "src/globals.h"
40#include "src/ic/call-optimization.h"
41#include "src/ic/ic.h"
42// GetRootConstructor
43#include "src/ic/ic-inl.h"
44#include "src/isolate-inl.h"
45#include "src/runtime/runtime.h"
46
47#if V8_TARGET_ARCH_IA32
48#include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
49#elif V8_TARGET_ARCH_X64
50#include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
51#elif V8_TARGET_ARCH_ARM64
52#include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
53#elif V8_TARGET_ARCH_ARM
54#include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
55#elif V8_TARGET_ARCH_PPC
56#include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
57#elif V8_TARGET_ARCH_MIPS
58#include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
59#elif V8_TARGET_ARCH_MIPS64
60#include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
61#elif V8_TARGET_ARCH_S390
62#include "src/crankshaft/s390/lithium-codegen-s390.h"  // NOLINT
63#elif V8_TARGET_ARCH_X87
64#include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
65#else
66#error Unsupported target architecture.
67#endif
68
69namespace v8 {
70namespace internal {
71
72const auto GetRegConfig = RegisterConfiguration::Crankshaft;
73
74class HOptimizedGraphBuilderWithPositions : public HOptimizedGraphBuilder {
75 public:
76  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
77      : HOptimizedGraphBuilder(info, true) {
78    SetSourcePosition(info->shared_info()->start_position());
79  }
80
81#define DEF_VISIT(type)                                      \
82  void Visit##type(type* node) override {                    \
83    SourcePosition old_position = SourcePosition::Unknown(); \
84    if (node->position() != kNoSourcePosition) {             \
85      old_position = source_position();                      \
86      SetSourcePosition(node->position());                   \
87    }                                                        \
88    HOptimizedGraphBuilder::Visit##type(node);               \
89    if (old_position.IsKnown()) {                            \
90      set_source_position(old_position);                     \
91    }                                                        \
92  }
93  EXPRESSION_NODE_LIST(DEF_VISIT)
94#undef DEF_VISIT
95
96#define DEF_VISIT(type)                                      \
97  void Visit##type(type* node) override {                    \
98    SourcePosition old_position = SourcePosition::Unknown(); \
99    if (node->position() != kNoSourcePosition) {             \
100      old_position = source_position();                      \
101      SetSourcePosition(node->position());                   \
102    }                                                        \
103    HOptimizedGraphBuilder::Visit##type(node);               \
104    if (old_position.IsKnown()) {                            \
105      set_source_position(old_position);                     \
106    }                                                        \
107  }
108  STATEMENT_NODE_LIST(DEF_VISIT)
109#undef DEF_VISIT
110
111#define DEF_VISIT(type)                        \
112  void Visit##type(type* node) override {      \
113    HOptimizedGraphBuilder::Visit##type(node); \
114  }
115  DECLARATION_NODE_LIST(DEF_VISIT)
116#undef DEF_VISIT
117};
118
119HCompilationJob::Status HCompilationJob::PrepareJobImpl() {
120  if (!isolate()->use_crankshaft() ||
121      info()->shared_info()->must_use_ignition_turbo()) {
122    // Crankshaft is entirely disabled.
123    return FAILED;
124  }
125
126  // Optimization requires a version of fullcode with deoptimization support.
127  // Recompile the unoptimized version of the code if the current version
128  // doesn't have deoptimization support already.
129  // Otherwise, if we are gathering compilation time and space statistics
130  // for hydrogen, gather baseline statistics for a fullcode compilation.
131  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
132  if (should_recompile || FLAG_hydrogen_stats) {
133    base::ElapsedTimer timer;
134    if (FLAG_hydrogen_stats) {
135      timer.Start();
136    }
137    if (!Compiler::EnsureDeoptimizationSupport(info())) {
138      return FAILED;
139    }
140    if (FLAG_hydrogen_stats) {
141      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
142    }
143  }
144  DCHECK(info()->shared_info()->has_deoptimization_support());
145
146  // Check the whitelist for Crankshaft.
147  if (!info()->shared_info()->PassesFilter(FLAG_hydrogen_filter)) {
148    return AbortOptimization(kHydrogenFilter);
149  }
150
151  Scope* scope = info()->scope();
152  if (LUnallocated::TooManyParameters(scope->num_parameters())) {
153    // Crankshaft would require too many Lithium operands.
154    return AbortOptimization(kTooManyParameters);
155  }
156
157  if (info()->is_osr() &&
158      LUnallocated::TooManyParametersOrStackSlots(scope->num_parameters(),
159                                                  scope->num_stack_slots())) {
160    // Crankshaft would require too many Lithium operands.
161    return AbortOptimization(kTooManyParametersLocals);
162  }
163
164  if (IsGeneratorFunction(info()->shared_info()->kind())) {
165    // Crankshaft does not support generators.
166    return AbortOptimization(kGenerator);
167  }
168
169  if (FLAG_trace_hydrogen) {
170    isolate()->GetHTracer()->TraceCompilation(info());
171  }
172
173  // Optimization could have been disabled by the parser. Note that this check
174  // is only needed because the Hydrogen graph builder is missing some bailouts.
175  if (info()->shared_info()->optimization_disabled()) {
176    return AbortOptimization(
177        info()->shared_info()->disable_optimization_reason());
178  }
179
180  HOptimizedGraphBuilder* graph_builder =
181      (FLAG_hydrogen_track_positions || isolate()->is_profiling() ||
182       FLAG_trace_ic)
183          ? new (info()->zone()) HOptimizedGraphBuilderWithPositions(info())
184          : new (info()->zone()) HOptimizedGraphBuilder(info(), false);
185
186  // Type-check the function.
187  AstTyper(info()->isolate(), info()->zone(), info()->closure(),
188           info()->scope(), info()->osr_ast_id(), info()->literal(),
189           graph_builder->bounds())
190      .Run();
191
192  graph_ = graph_builder->CreateGraph();
193
194  if (isolate()->has_pending_exception()) {
195    return FAILED;
196  }
197
198  if (graph_ == NULL) return FAILED;
199
200  if (info()->dependencies()->HasAborted()) {
201    // Dependency has changed during graph creation. Let's try again later.
202    return RetryOptimization(kBailedOutDueToDependencyChange);
203  }
204
205  return SUCCEEDED;
206}
207
208HCompilationJob::Status HCompilationJob::ExecuteJobImpl() {
209  DCHECK(graph_ != NULL);
210  BailoutReason bailout_reason = kNoReason;
211
212  if (graph_->Optimize(&bailout_reason)) {
213    chunk_ = LChunk::NewChunk(graph_);
214    if (chunk_ != NULL) return SUCCEEDED;
215  } else if (bailout_reason != kNoReason) {
216    info()->AbortOptimization(bailout_reason);
217  }
218
219  return FAILED;
220}
221
222HCompilationJob::Status HCompilationJob::FinalizeJobImpl() {
223  DCHECK(chunk_ != NULL);
224  DCHECK(graph_ != NULL);
225  {
226    // Deferred handles reference objects that were accessible during
227    // graph creation.  To make sure that we don't encounter inconsistencies
228    // between graph creation and code generation, we disallow accessing
229    // objects through deferred handles during the latter, with exceptions.
230    DisallowDeferredHandleDereference no_deferred_handle_deref;
231    Handle<Code> optimized_code = chunk_->Codegen();
232    if (optimized_code.is_null()) {
233      if (info()->bailout_reason() == kNoReason) {
234        return AbortOptimization(kCodeGenerationFailed);
235      }
236      return FAILED;
237    }
238    RegisterWeakObjectsInOptimizedCode(optimized_code);
239    info()->SetCode(optimized_code);
240  }
241  // Add to the weak list of optimized code objects.
242  info()->context()->native_context()->AddOptimizedCode(*info()->code());
243  return SUCCEEDED;
244}
245
246HBasicBlock::HBasicBlock(HGraph* graph)
247    : block_id_(graph->GetNextBlockID()),
248      graph_(graph),
249      phis_(4, graph->zone()),
250      first_(NULL),
251      last_(NULL),
252      end_(NULL),
253      loop_information_(NULL),
254      predecessors_(2, graph->zone()),
255      dominator_(NULL),
256      dominated_blocks_(4, graph->zone()),
257      last_environment_(NULL),
258      argument_count_(-1),
259      first_instruction_index_(-1),
260      last_instruction_index_(-1),
261      deleted_phis_(4, graph->zone()),
262      parent_loop_header_(NULL),
263      inlined_entry_block_(NULL),
264      is_inline_return_target_(false),
265      is_reachable_(true),
266      dominates_loop_successors_(false),
267      is_osr_entry_(false),
268      is_ordered_(false) { }
269
270
271Isolate* HBasicBlock::isolate() const {
272  return graph_->isolate();
273}
274
275
276void HBasicBlock::MarkUnreachable() {
277  is_reachable_ = false;
278}
279
280
281void HBasicBlock::AttachLoopInformation() {
282  DCHECK(!IsLoopHeader());
283  loop_information_ = new(zone()) HLoopInformation(this, zone());
284}
285
286
287void HBasicBlock::DetachLoopInformation() {
288  DCHECK(IsLoopHeader());
289  loop_information_ = NULL;
290}
291
292
293void HBasicBlock::AddPhi(HPhi* phi) {
294  DCHECK(!IsStartBlock());
295  phis_.Add(phi, zone());
296  phi->SetBlock(this);
297}
298
299
300void HBasicBlock::RemovePhi(HPhi* phi) {
301  DCHECK(phi->block() == this);
302  DCHECK(phis_.Contains(phi));
303  phi->Kill();
304  phis_.RemoveElement(phi);
305  phi->SetBlock(NULL);
306}
307
308
309void HBasicBlock::AddInstruction(HInstruction* instr, SourcePosition position) {
310  DCHECK(!IsStartBlock() || !IsFinished());
311  DCHECK(!instr->IsLinked());
312  DCHECK(!IsFinished());
313
314  if (position.IsKnown()) {
315    instr->set_position(position);
316  }
317  if (first_ == NULL) {
318    DCHECK(last_environment() != NULL);
319    DCHECK(!last_environment()->ast_id().IsNone());
320    HBlockEntry* entry = new(zone()) HBlockEntry();
321    entry->InitializeAsFirst(this);
322    if (position.IsKnown()) {
323      entry->set_position(position);
324    } else {
325      DCHECK(!FLAG_hydrogen_track_positions ||
326             !graph()->info()->IsOptimizing() || instr->IsAbnormalExit());
327    }
328    first_ = last_ = entry;
329  }
330  instr->InsertAfter(last_);
331}
332
333
334HPhi* HBasicBlock::AddNewPhi(int merged_index) {
335  if (graph()->IsInsideNoSideEffectsScope()) {
336    merged_index = HPhi::kInvalidMergedIndex;
337  }
338  HPhi* phi = new(zone()) HPhi(merged_index, zone());
339  AddPhi(phi);
340  return phi;
341}
342
343
344HSimulate* HBasicBlock::CreateSimulate(BailoutId ast_id,
345                                       RemovableSimulate removable) {
346  DCHECK(HasEnvironment());
347  HEnvironment* environment = last_environment();
348  DCHECK(ast_id.IsNone() ||
349         ast_id == BailoutId::StubEntry() ||
350         environment->closure()->shared()->VerifyBailoutId(ast_id));
351
352  int push_count = environment->push_count();
353  int pop_count = environment->pop_count();
354
355  HSimulate* instr =
356      new(zone()) HSimulate(ast_id, pop_count, zone(), removable);
357#ifdef DEBUG
358  instr->set_closure(environment->closure());
359#endif
360  // Order of pushed values: newest (top of stack) first. This allows
361  // HSimulate::MergeWith() to easily append additional pushed values
362  // that are older (from further down the stack).
363  for (int i = 0; i < push_count; ++i) {
364    instr->AddPushedValue(environment->ExpressionStackAt(i));
365  }
366  for (GrowableBitVector::Iterator it(environment->assigned_variables(),
367                                      zone());
368       !it.Done();
369       it.Advance()) {
370    int index = it.Current();
371    instr->AddAssignedValue(index, environment->Lookup(index));
372  }
373  environment->ClearHistory();
374  return instr;
375}
376
377
378void HBasicBlock::Finish(HControlInstruction* end, SourcePosition position) {
379  DCHECK(!IsFinished());
380  AddInstruction(end, position);
381  end_ = end;
382  for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
383    it.Current()->RegisterPredecessor(this);
384  }
385}
386
387
388void HBasicBlock::Goto(HBasicBlock* block, SourcePosition position,
389                       FunctionState* state, bool add_simulate) {
390  bool drop_extra = state != NULL &&
391      state->inlining_kind() == NORMAL_RETURN;
392
393  if (block->IsInlineReturnTarget()) {
394    HEnvironment* env = last_environment();
395    int argument_count = env->arguments_environment()->parameter_count();
396    AddInstruction(new(zone())
397                   HLeaveInlined(state->entry(), argument_count),
398                   position);
399    UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
400  }
401
402  if (add_simulate) AddNewSimulate(BailoutId::None(), position);
403  HGoto* instr = new(zone()) HGoto(block);
404  Finish(instr, position);
405}
406
407
408void HBasicBlock::AddLeaveInlined(HValue* return_value, FunctionState* state,
409                                  SourcePosition position) {
410  HBasicBlock* target = state->function_return();
411  bool drop_extra = state->inlining_kind() == NORMAL_RETURN;
412
413  DCHECK(target->IsInlineReturnTarget());
414  DCHECK(return_value != NULL);
415  HEnvironment* env = last_environment();
416  int argument_count = env->arguments_environment()->parameter_count();
417  AddInstruction(new(zone()) HLeaveInlined(state->entry(), argument_count),
418                 position);
419  UpdateEnvironment(last_environment()->DiscardInlined(drop_extra));
420  last_environment()->Push(return_value);
421  AddNewSimulate(BailoutId::None(), position);
422  HGoto* instr = new(zone()) HGoto(target);
423  Finish(instr, position);
424}
425
426
427void HBasicBlock::SetInitialEnvironment(HEnvironment* env) {
428  DCHECK(!HasEnvironment());
429  DCHECK(first() == NULL);
430  UpdateEnvironment(env);
431}
432
433
434void HBasicBlock::UpdateEnvironment(HEnvironment* env) {
435  last_environment_ = env;
436  graph()->update_maximum_environment_size(env->first_expression_index());
437}
438
439
440void HBasicBlock::SetJoinId(BailoutId ast_id) {
441  int length = predecessors_.length();
442  DCHECK(length > 0);
443  for (int i = 0; i < length; i++) {
444    HBasicBlock* predecessor = predecessors_[i];
445    DCHECK(predecessor->end()->IsGoto());
446    HSimulate* simulate = HSimulate::cast(predecessor->end()->previous());
447    DCHECK(i != 0 ||
448           (predecessor->last_environment()->closure().is_null() ||
449            predecessor->last_environment()->closure()->shared()
450              ->VerifyBailoutId(ast_id)));
451    simulate->set_ast_id(ast_id);
452    predecessor->last_environment()->set_ast_id(ast_id);
453  }
454}
455
456
457bool HBasicBlock::Dominates(HBasicBlock* other) const {
458  HBasicBlock* current = other->dominator();
459  while (current != NULL) {
460    if (current == this) return true;
461    current = current->dominator();
462  }
463  return false;
464}
465
466
467bool HBasicBlock::EqualToOrDominates(HBasicBlock* other) const {
468  if (this == other) return true;
469  return Dominates(other);
470}
471
472
473int HBasicBlock::LoopNestingDepth() const {
474  const HBasicBlock* current = this;
475  int result  = (current->IsLoopHeader()) ? 1 : 0;
476  while (current->parent_loop_header() != NULL) {
477    current = current->parent_loop_header();
478    result++;
479  }
480  return result;
481}
482
483
484void HBasicBlock::PostProcessLoopHeader(IterationStatement* stmt) {
485  DCHECK(IsLoopHeader());
486
487  SetJoinId(stmt->EntryId());
488  if (predecessors()->length() == 1) {
489    // This is a degenerated loop.
490    DetachLoopInformation();
491    return;
492  }
493
494  // Only the first entry into the loop is from outside the loop. All other
495  // entries must be back edges.
496  for (int i = 1; i < predecessors()->length(); ++i) {
497    loop_information()->RegisterBackEdge(predecessors()->at(i));
498  }
499}
500
501
502void HBasicBlock::MarkSuccEdgeUnreachable(int succ) {
503  DCHECK(IsFinished());
504  HBasicBlock* succ_block = end()->SuccessorAt(succ);
505
506  DCHECK(succ_block->predecessors()->length() == 1);
507  succ_block->MarkUnreachable();
508}
509
510
511void HBasicBlock::RegisterPredecessor(HBasicBlock* pred) {
512  if (HasPredecessor()) {
513    // Only loop header blocks can have a predecessor added after
514    // instructions have been added to the block (they have phis for all
515    // values in the environment, these phis may be eliminated later).
516    DCHECK(IsLoopHeader() || first_ == NULL);
517    HEnvironment* incoming_env = pred->last_environment();
518    if (IsLoopHeader()) {
519      DCHECK_EQ(phis()->length(), incoming_env->length());
520      for (int i = 0; i < phis_.length(); ++i) {
521        phis_[i]->AddInput(incoming_env->values()->at(i));
522      }
523    } else {
524      last_environment()->AddIncomingEdge(this, pred->last_environment());
525    }
526  } else if (!HasEnvironment() && !IsFinished()) {
527    DCHECK(!IsLoopHeader());
528    SetInitialEnvironment(pred->last_environment()->Copy());
529  }
530
531  predecessors_.Add(pred, zone());
532}
533
534
535void HBasicBlock::AddDominatedBlock(HBasicBlock* block) {
536  DCHECK(!dominated_blocks_.Contains(block));
537  // Keep the list of dominated blocks sorted such that if there is two
538  // succeeding block in this list, the predecessor is before the successor.
539  int index = 0;
540  while (index < dominated_blocks_.length() &&
541         dominated_blocks_[index]->block_id() < block->block_id()) {
542    ++index;
543  }
544  dominated_blocks_.InsertAt(index, block, zone());
545}
546
547
548void HBasicBlock::AssignCommonDominator(HBasicBlock* other) {
549  if (dominator_ == NULL) {
550    dominator_ = other;
551    other->AddDominatedBlock(this);
552  } else if (other->dominator() != NULL) {
553    HBasicBlock* first = dominator_;
554    HBasicBlock* second = other;
555
556    while (first != second) {
557      if (first->block_id() > second->block_id()) {
558        first = first->dominator();
559      } else {
560        second = second->dominator();
561      }
562      DCHECK(first != NULL && second != NULL);
563    }
564
565    if (dominator_ != first) {
566      DCHECK(dominator_->dominated_blocks_.Contains(this));
567      dominator_->dominated_blocks_.RemoveElement(this);
568      dominator_ = first;
569      first->AddDominatedBlock(this);
570    }
571  }
572}
573
574
575void HBasicBlock::AssignLoopSuccessorDominators() {
576  // Mark blocks that dominate all subsequent reachable blocks inside their
577  // loop. Exploit the fact that blocks are sorted in reverse post order. When
578  // the loop is visited in increasing block id order, if the number of
579  // non-loop-exiting successor edges at the dominator_candidate block doesn't
580  // exceed the number of previously encountered predecessor edges, there is no
581  // path from the loop header to any block with higher id that doesn't go
582  // through the dominator_candidate block. In this case, the
583  // dominator_candidate block is guaranteed to dominate all blocks reachable
584  // from it with higher ids.
585  HBasicBlock* last = loop_information()->GetLastBackEdge();
586  int outstanding_successors = 1;  // one edge from the pre-header
587  // Header always dominates everything.
588  MarkAsLoopSuccessorDominator();
589  for (int j = block_id(); j <= last->block_id(); ++j) {
590    HBasicBlock* dominator_candidate = graph_->blocks()->at(j);
591    for (HPredecessorIterator it(dominator_candidate); !it.Done();
592         it.Advance()) {
593      HBasicBlock* predecessor = it.Current();
594      // Don't count back edges.
595      if (predecessor->block_id() < dominator_candidate->block_id()) {
596        outstanding_successors--;
597      }
598    }
599
600    // If more successors than predecessors have been seen in the loop up to
601    // now, it's not possible to guarantee that the current block dominates
602    // all of the blocks with higher IDs. In this case, assume conservatively
603    // that those paths through loop that don't go through the current block
604    // contain all of the loop's dependencies. Also be careful to record
605    // dominator information about the current loop that's being processed,
606    // and not nested loops, which will be processed when
607    // AssignLoopSuccessorDominators gets called on their header.
608    DCHECK(outstanding_successors >= 0);
609    HBasicBlock* parent_loop_header = dominator_candidate->parent_loop_header();
610    if (outstanding_successors == 0 &&
611        (parent_loop_header == this && !dominator_candidate->IsLoopHeader())) {
612      dominator_candidate->MarkAsLoopSuccessorDominator();
613    }
614    HControlInstruction* end = dominator_candidate->end();
615    for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
616      HBasicBlock* successor = it.Current();
617      // Only count successors that remain inside the loop and don't loop back
618      // to a loop header.
619      if (successor->block_id() > dominator_candidate->block_id() &&
620          successor->block_id() <= last->block_id()) {
621        // Backwards edges must land on loop headers.
622        DCHECK(successor->block_id() > dominator_candidate->block_id() ||
623               successor->IsLoopHeader());
624        outstanding_successors++;
625      }
626    }
627  }
628}
629
630
631int HBasicBlock::PredecessorIndexOf(HBasicBlock* predecessor) const {
632  for (int i = 0; i < predecessors_.length(); ++i) {
633    if (predecessors_[i] == predecessor) return i;
634  }
635  UNREACHABLE();
636  return -1;
637}
638
639
640#ifdef DEBUG
641void HBasicBlock::Verify() {
642  // Check that every block is finished.
643  DCHECK(IsFinished());
644  DCHECK(block_id() >= 0);
645
646  // Check that the incoming edges are in edge split form.
647  if (predecessors_.length() > 1) {
648    for (int i = 0; i < predecessors_.length(); ++i) {
649      DCHECK(predecessors_[i]->end()->SecondSuccessor() == NULL);
650    }
651  }
652}
653#endif
654
655
656void HLoopInformation::RegisterBackEdge(HBasicBlock* block) {
657  this->back_edges_.Add(block, block->zone());
658  AddBlock(block);
659}
660
661
662HBasicBlock* HLoopInformation::GetLastBackEdge() const {
663  int max_id = -1;
664  HBasicBlock* result = NULL;
665  for (int i = 0; i < back_edges_.length(); ++i) {
666    HBasicBlock* cur = back_edges_[i];
667    if (cur->block_id() > max_id) {
668      max_id = cur->block_id();
669      result = cur;
670    }
671  }
672  return result;
673}
674
675
676void HLoopInformation::AddBlock(HBasicBlock* block) {
677  if (block == loop_header()) return;
678  if (block->parent_loop_header() == loop_header()) return;
679  if (block->parent_loop_header() != NULL) {
680    AddBlock(block->parent_loop_header());
681  } else {
682    block->set_parent_loop_header(loop_header());
683    blocks_.Add(block, block->zone());
684    for (int i = 0; i < block->predecessors()->length(); ++i) {
685      AddBlock(block->predecessors()->at(i));
686    }
687  }
688}
689
690
691#ifdef DEBUG
692
693// Checks reachability of the blocks in this graph and stores a bit in
694// the BitVector "reachable()" for every block that can be reached
695// from the start block of the graph. If "dont_visit" is non-null, the given
696// block is treated as if it would not be part of the graph. "visited_count()"
697// returns the number of reachable blocks.
698class ReachabilityAnalyzer BASE_EMBEDDED {
699 public:
700  ReachabilityAnalyzer(HBasicBlock* entry_block,
701                       int block_count,
702                       HBasicBlock* dont_visit)
703      : visited_count_(0),
704        stack_(16, entry_block->zone()),
705        reachable_(block_count, entry_block->zone()),
706        dont_visit_(dont_visit) {
707    PushBlock(entry_block);
708    Analyze();
709  }
710
711  int visited_count() const { return visited_count_; }
712  const BitVector* reachable() const { return &reachable_; }
713
714 private:
715  void PushBlock(HBasicBlock* block) {
716    if (block != NULL && block != dont_visit_ &&
717        !reachable_.Contains(block->block_id())) {
718      reachable_.Add(block->block_id());
719      stack_.Add(block, block->zone());
720      visited_count_++;
721    }
722  }
723
724  void Analyze() {
725    while (!stack_.is_empty()) {
726      HControlInstruction* end = stack_.RemoveLast()->end();
727      for (HSuccessorIterator it(end); !it.Done(); it.Advance()) {
728        PushBlock(it.Current());
729      }
730    }
731  }
732
733  int visited_count_;
734  ZoneList<HBasicBlock*> stack_;
735  BitVector reachable_;
736  HBasicBlock* dont_visit_;
737};
738
739
740void HGraph::Verify(bool do_full_verify) const {
741  Heap::RelocationLock relocation_lock(isolate()->heap());
742  AllowHandleDereference allow_deref;
743  AllowDeferredHandleDereference allow_deferred_deref;
744  for (int i = 0; i < blocks_.length(); i++) {
745    HBasicBlock* block = blocks_.at(i);
746
747    block->Verify();
748
749    // Check that every block contains at least one node and that only the last
750    // node is a control instruction.
751    HInstruction* current = block->first();
752    DCHECK(current != NULL && current->IsBlockEntry());
753    while (current != NULL) {
754      DCHECK((current->next() == NULL) == current->IsControlInstruction());
755      DCHECK(current->block() == block);
756      current->Verify();
757      current = current->next();
758    }
759
760    // Check that successors are correctly set.
761    HBasicBlock* first = block->end()->FirstSuccessor();
762    HBasicBlock* second = block->end()->SecondSuccessor();
763    DCHECK(second == NULL || first != NULL);
764
765    // Check that the predecessor array is correct.
766    if (first != NULL) {
767      DCHECK(first->predecessors()->Contains(block));
768      if (second != NULL) {
769        DCHECK(second->predecessors()->Contains(block));
770      }
771    }
772
773    // Check that phis have correct arguments.
774    for (int j = 0; j < block->phis()->length(); j++) {
775      HPhi* phi = block->phis()->at(j);
776      phi->Verify();
777    }
778
779    // Check that all join blocks have predecessors that end with an
780    // unconditional goto and agree on their environment node id.
781    if (block->predecessors()->length() >= 2) {
782      BailoutId id =
783          block->predecessors()->first()->last_environment()->ast_id();
784      for (int k = 0; k < block->predecessors()->length(); k++) {
785        HBasicBlock* predecessor = block->predecessors()->at(k);
786        DCHECK(predecessor->end()->IsGoto() ||
787               predecessor->end()->IsDeoptimize());
788        DCHECK(predecessor->last_environment()->ast_id() == id);
789      }
790    }
791  }
792
793  // Check special property of first block to have no predecessors.
794  DCHECK(blocks_.at(0)->predecessors()->is_empty());
795
796  if (do_full_verify) {
797    // Check that the graph is fully connected.
798    ReachabilityAnalyzer analyzer(entry_block_, blocks_.length(), NULL);
799    DCHECK(analyzer.visited_count() == blocks_.length());
800
801    // Check that entry block dominator is NULL.
802    DCHECK(entry_block_->dominator() == NULL);
803
804    // Check dominators.
805    for (int i = 0; i < blocks_.length(); ++i) {
806      HBasicBlock* block = blocks_.at(i);
807      if (block->dominator() == NULL) {
808        // Only start block may have no dominator assigned to.
809        DCHECK(i == 0);
810      } else {
811        // Assert that block is unreachable if dominator must not be visited.
812        ReachabilityAnalyzer dominator_analyzer(entry_block_,
813                                                blocks_.length(),
814                                                block->dominator());
815        DCHECK(!dominator_analyzer.reachable()->Contains(block->block_id()));
816      }
817    }
818  }
819}
820
821#endif
822
823
824HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
825                               int32_t value) {
826  if (!pointer->is_set()) {
827    // Can't pass GetInvalidContext() to HConstant::New, because that will
828    // recursively call GetConstant
829    HConstant* constant = HConstant::New(isolate(), zone(), NULL, value);
830    constant->InsertAfter(entry_block()->first());
831    pointer->set(constant);
832    return constant;
833  }
834  return ReinsertConstantIfNecessary(pointer->get());
835}
836
837
838HConstant* HGraph::ReinsertConstantIfNecessary(HConstant* constant) {
839  if (!constant->IsLinked()) {
840    // The constant was removed from the graph. Reinsert.
841    constant->ClearFlag(HValue::kIsDead);
842    constant->InsertAfter(entry_block()->first());
843  }
844  return constant;
845}
846
847
848HConstant* HGraph::GetConstant0() {
849  return GetConstant(&constant_0_, 0);
850}
851
852
853HConstant* HGraph::GetConstant1() {
854  return GetConstant(&constant_1_, 1);
855}
856
857
858HConstant* HGraph::GetConstantMinus1() {
859  return GetConstant(&constant_minus1_, -1);
860}
861
862
863HConstant* HGraph::GetConstantBool(bool value) {
864  return value ? GetConstantTrue() : GetConstantFalse();
865}
866
867#define DEFINE_GET_CONSTANT(Name, name, constant, type, htype, boolean_value, \
868                            undetectable)                                     \
869  HConstant* HGraph::GetConstant##Name() {                                    \
870    if (!constant_##name##_.is_set()) {                                       \
871      HConstant* constant = new (zone()) HConstant(                           \
872          Unique<Object>::CreateImmovable(isolate()->factory()->constant()),  \
873          Unique<Map>::CreateImmovable(isolate()->factory()->type##_map()),   \
874          false, Representation::Tagged(), htype, true, boolean_value,        \
875          undetectable, ODDBALL_TYPE);                                        \
876      constant->InsertAfter(entry_block()->first());                          \
877      constant_##name##_.set(constant);                                       \
878    }                                                                         \
879    return ReinsertConstantIfNecessary(constant_##name##_.get());             \
880  }
881
882DEFINE_GET_CONSTANT(Undefined, undefined, undefined_value, undefined,
883                    HType::Undefined(), false, true)
884DEFINE_GET_CONSTANT(True, true, true_value, boolean, HType::Boolean(), true,
885                    false)
886DEFINE_GET_CONSTANT(False, false, false_value, boolean, HType::Boolean(), false,
887                    false)
888DEFINE_GET_CONSTANT(Hole, the_hole, the_hole_value, the_hole, HType::None(),
889                    false, false)
890DEFINE_GET_CONSTANT(Null, null, null_value, null, HType::Null(), false, true)
891DEFINE_GET_CONSTANT(OptimizedOut, optimized_out, optimized_out, optimized_out,
892                    HType::None(), false, false)
893
894#undef DEFINE_GET_CONSTANT
895
896#define DEFINE_IS_CONSTANT(Name, name)                                         \
897bool HGraph::IsConstant##Name(HConstant* constant) {                           \
898  return constant_##name##_.is_set() && constant == constant_##name##_.get();  \
899}
900DEFINE_IS_CONSTANT(Undefined, undefined)
901DEFINE_IS_CONSTANT(0, 0)
902DEFINE_IS_CONSTANT(1, 1)
903DEFINE_IS_CONSTANT(Minus1, minus1)
904DEFINE_IS_CONSTANT(True, true)
905DEFINE_IS_CONSTANT(False, false)
906DEFINE_IS_CONSTANT(Hole, the_hole)
907DEFINE_IS_CONSTANT(Null, null)
908
909#undef DEFINE_IS_CONSTANT
910
911
912HConstant* HGraph::GetInvalidContext() {
913  return GetConstant(&constant_invalid_context_, 0xFFFFC0C7);
914}
915
916
917bool HGraph::IsStandardConstant(HConstant* constant) {
918  if (IsConstantUndefined(constant)) return true;
919  if (IsConstant0(constant)) return true;
920  if (IsConstant1(constant)) return true;
921  if (IsConstantMinus1(constant)) return true;
922  if (IsConstantTrue(constant)) return true;
923  if (IsConstantFalse(constant)) return true;
924  if (IsConstantHole(constant)) return true;
925  if (IsConstantNull(constant)) return true;
926  return false;
927}
928
929
930HGraphBuilder::IfBuilder::IfBuilder() : builder_(NULL), needs_compare_(true) {}
931
932
933HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder)
934    : needs_compare_(true) {
935  Initialize(builder);
936}
937
938
939HGraphBuilder::IfBuilder::IfBuilder(HGraphBuilder* builder,
940                                    HIfContinuation* continuation)
941    : needs_compare_(false), first_true_block_(NULL), first_false_block_(NULL) {
942  InitializeDontCreateBlocks(builder);
943  continuation->Continue(&first_true_block_, &first_false_block_);
944}
945
946
947void HGraphBuilder::IfBuilder::InitializeDontCreateBlocks(
948    HGraphBuilder* builder) {
949  builder_ = builder;
950  finished_ = false;
951  did_then_ = false;
952  did_else_ = false;
953  did_else_if_ = false;
954  did_and_ = false;
955  did_or_ = false;
956  captured_ = false;
957  pending_merge_block_ = false;
958  split_edge_merge_block_ = NULL;
959  merge_at_join_blocks_ = NULL;
960  normal_merge_at_join_block_count_ = 0;
961  deopt_merge_at_join_block_count_ = 0;
962}
963
964
965void HGraphBuilder::IfBuilder::Initialize(HGraphBuilder* builder) {
966  InitializeDontCreateBlocks(builder);
967  HEnvironment* env = builder->environment();
968  first_true_block_ = builder->CreateBasicBlock(env->Copy());
969  first_false_block_ = builder->CreateBasicBlock(env->Copy());
970}
971
972
973HControlInstruction* HGraphBuilder::IfBuilder::AddCompare(
974    HControlInstruction* compare) {
975  DCHECK(did_then_ == did_else_);
976  if (did_else_) {
977    // Handle if-then-elseif
978    did_else_if_ = true;
979    did_else_ = false;
980    did_then_ = false;
981    did_and_ = false;
982    did_or_ = false;
983    pending_merge_block_ = false;
984    split_edge_merge_block_ = NULL;
985    HEnvironment* env = builder()->environment();
986    first_true_block_ = builder()->CreateBasicBlock(env->Copy());
987    first_false_block_ = builder()->CreateBasicBlock(env->Copy());
988  }
989  if (split_edge_merge_block_ != NULL) {
990    HEnvironment* env = first_false_block_->last_environment();
991    HBasicBlock* split_edge = builder()->CreateBasicBlock(env->Copy());
992    if (did_or_) {
993      compare->SetSuccessorAt(0, split_edge);
994      compare->SetSuccessorAt(1, first_false_block_);
995    } else {
996      compare->SetSuccessorAt(0, first_true_block_);
997      compare->SetSuccessorAt(1, split_edge);
998    }
999    builder()->GotoNoSimulate(split_edge, split_edge_merge_block_);
1000  } else {
1001    compare->SetSuccessorAt(0, first_true_block_);
1002    compare->SetSuccessorAt(1, first_false_block_);
1003  }
1004  builder()->FinishCurrentBlock(compare);
1005  needs_compare_ = false;
1006  return compare;
1007}
1008
1009
1010void HGraphBuilder::IfBuilder::Or() {
1011  DCHECK(!needs_compare_);
1012  DCHECK(!did_and_);
1013  did_or_ = true;
1014  HEnvironment* env = first_false_block_->last_environment();
1015  if (split_edge_merge_block_ == NULL) {
1016    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
1017    builder()->GotoNoSimulate(first_true_block_, split_edge_merge_block_);
1018    first_true_block_ = split_edge_merge_block_;
1019  }
1020  builder()->set_current_block(first_false_block_);
1021  first_false_block_ = builder()->CreateBasicBlock(env->Copy());
1022}
1023
1024
1025void HGraphBuilder::IfBuilder::And() {
1026  DCHECK(!needs_compare_);
1027  DCHECK(!did_or_);
1028  did_and_ = true;
1029  HEnvironment* env = first_false_block_->last_environment();
1030  if (split_edge_merge_block_ == NULL) {
1031    split_edge_merge_block_ = builder()->CreateBasicBlock(env->Copy());
1032    builder()->GotoNoSimulate(first_false_block_, split_edge_merge_block_);
1033    first_false_block_ = split_edge_merge_block_;
1034  }
1035  builder()->set_current_block(first_true_block_);
1036  first_true_block_ = builder()->CreateBasicBlock(env->Copy());
1037}
1038
1039
1040void HGraphBuilder::IfBuilder::CaptureContinuation(
1041    HIfContinuation* continuation) {
1042  DCHECK(!did_else_if_);
1043  DCHECK(!finished_);
1044  DCHECK(!captured_);
1045
1046  HBasicBlock* true_block = NULL;
1047  HBasicBlock* false_block = NULL;
1048  Finish(&true_block, &false_block);
1049  DCHECK(true_block != NULL);
1050  DCHECK(false_block != NULL);
1051  continuation->Capture(true_block, false_block);
1052  captured_ = true;
1053  builder()->set_current_block(NULL);
1054  End();
1055}
1056
1057
1058void HGraphBuilder::IfBuilder::JoinContinuation(HIfContinuation* continuation) {
1059  DCHECK(!did_else_if_);
1060  DCHECK(!finished_);
1061  DCHECK(!captured_);
1062  HBasicBlock* true_block = NULL;
1063  HBasicBlock* false_block = NULL;
1064  Finish(&true_block, &false_block);
1065  merge_at_join_blocks_ = NULL;
1066  if (true_block != NULL && !true_block->IsFinished()) {
1067    DCHECK(continuation->IsTrueReachable());
1068    builder()->GotoNoSimulate(true_block, continuation->true_branch());
1069  }
1070  if (false_block != NULL && !false_block->IsFinished()) {
1071    DCHECK(continuation->IsFalseReachable());
1072    builder()->GotoNoSimulate(false_block, continuation->false_branch());
1073  }
1074  captured_ = true;
1075  End();
1076}
1077
1078
1079void HGraphBuilder::IfBuilder::Then() {
1080  DCHECK(!captured_);
1081  DCHECK(!finished_);
1082  did_then_ = true;
1083  if (needs_compare_) {
1084    // Handle if's without any expressions, they jump directly to the "else"
1085    // branch. However, we must pretend that the "then" branch is reachable,
1086    // so that the graph builder visits it and sees any live range extending
1087    // constructs within it.
1088    HConstant* constant_false = builder()->graph()->GetConstantFalse();
1089    ToBooleanHints boolean_type = ToBooleanHint::kBoolean;
1090    HBranch* branch = builder()->New<HBranch>(
1091        constant_false, boolean_type, first_true_block_, first_false_block_);
1092    builder()->FinishCurrentBlock(branch);
1093  }
1094  builder()->set_current_block(first_true_block_);
1095  pending_merge_block_ = true;
1096}
1097
1098
1099void HGraphBuilder::IfBuilder::Else() {
1100  DCHECK(did_then_);
1101  DCHECK(!captured_);
1102  DCHECK(!finished_);
1103  AddMergeAtJoinBlock(false);
1104  builder()->set_current_block(first_false_block_);
1105  pending_merge_block_ = true;
1106  did_else_ = true;
1107}
1108
1109void HGraphBuilder::IfBuilder::Deopt(DeoptimizeReason reason) {
1110  DCHECK(did_then_);
1111  builder()->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
1112  AddMergeAtJoinBlock(true);
1113}
1114
1115
1116void HGraphBuilder::IfBuilder::Return(HValue* value) {
1117  HValue* parameter_count = builder()->graph()->GetConstantMinus1();
1118  builder()->FinishExitCurrentBlock(
1119      builder()->New<HReturn>(value, parameter_count));
1120  AddMergeAtJoinBlock(false);
1121}
1122
1123
1124void HGraphBuilder::IfBuilder::AddMergeAtJoinBlock(bool deopt) {
1125  if (!pending_merge_block_) return;
1126  HBasicBlock* block = builder()->current_block();
1127  DCHECK(block == NULL || !block->IsFinished());
1128  MergeAtJoinBlock* record = new (builder()->zone())
1129      MergeAtJoinBlock(block, deopt, merge_at_join_blocks_);
1130  merge_at_join_blocks_ = record;
1131  if (block != NULL) {
1132    DCHECK(block->end() == NULL);
1133    if (deopt) {
1134      normal_merge_at_join_block_count_++;
1135    } else {
1136      deopt_merge_at_join_block_count_++;
1137    }
1138  }
1139  builder()->set_current_block(NULL);
1140  pending_merge_block_ = false;
1141}
1142
1143
1144void HGraphBuilder::IfBuilder::Finish() {
1145  DCHECK(!finished_);
1146  if (!did_then_) {
1147    Then();
1148  }
1149  AddMergeAtJoinBlock(false);
1150  if (!did_else_) {
1151    Else();
1152    AddMergeAtJoinBlock(false);
1153  }
1154  finished_ = true;
1155}
1156
1157
1158void HGraphBuilder::IfBuilder::Finish(HBasicBlock** then_continuation,
1159                                      HBasicBlock** else_continuation) {
1160  Finish();
1161
1162  MergeAtJoinBlock* else_record = merge_at_join_blocks_;
1163  if (else_continuation != NULL) {
1164    *else_continuation = else_record->block_;
1165  }
1166  MergeAtJoinBlock* then_record = else_record->next_;
1167  if (then_continuation != NULL) {
1168    *then_continuation = then_record->block_;
1169  }
1170  DCHECK(then_record->next_ == NULL);
1171}
1172
1173
1174void HGraphBuilder::IfBuilder::EndUnreachable() {
1175  if (captured_) return;
1176  Finish();
1177  builder()->set_current_block(nullptr);
1178}
1179
1180
1181void HGraphBuilder::IfBuilder::End() {
1182  if (captured_) return;
1183  Finish();
1184
1185  int total_merged_blocks = normal_merge_at_join_block_count_ +
1186    deopt_merge_at_join_block_count_;
1187  DCHECK(total_merged_blocks >= 1);
1188  HBasicBlock* merge_block =
1189      total_merged_blocks == 1 ? NULL : builder()->graph()->CreateBasicBlock();
1190
1191  // Merge non-deopt blocks first to ensure environment has right size for
1192  // padding.
1193  MergeAtJoinBlock* current = merge_at_join_blocks_;
1194  while (current != NULL) {
1195    if (!current->deopt_ && current->block_ != NULL) {
1196      // If there is only one block that makes it through to the end of the
1197      // if, then just set it as the current block and continue rather then
1198      // creating an unnecessary merge block.
1199      if (total_merged_blocks == 1) {
1200        builder()->set_current_block(current->block_);
1201        return;
1202      }
1203      builder()->GotoNoSimulate(current->block_, merge_block);
1204    }
1205    current = current->next_;
1206  }
1207
1208  // Merge deopt blocks, padding when necessary.
1209  current = merge_at_join_blocks_;
1210  while (current != NULL) {
1211    if (current->deopt_ && current->block_ != NULL) {
1212      current->block_->FinishExit(
1213          HAbnormalExit::New(builder()->isolate(), builder()->zone(), NULL),
1214          SourcePosition::Unknown());
1215    }
1216    current = current->next_;
1217  }
1218  builder()->set_current_block(merge_block);
1219}
1220
1221
1222HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder) {
1223  Initialize(builder, NULL, kWhileTrue, NULL);
1224}
1225
1226
1227HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1228                                        LoopBuilder::Direction direction) {
1229  Initialize(builder, context, direction, builder->graph()->GetConstant1());
1230}
1231
1232
1233HGraphBuilder::LoopBuilder::LoopBuilder(HGraphBuilder* builder, HValue* context,
1234                                        LoopBuilder::Direction direction,
1235                                        HValue* increment_amount) {
1236  Initialize(builder, context, direction, increment_amount);
1237  increment_amount_ = increment_amount;
1238}
1239
1240
1241void HGraphBuilder::LoopBuilder::Initialize(HGraphBuilder* builder,
1242                                            HValue* context,
1243                                            Direction direction,
1244                                            HValue* increment_amount) {
1245  builder_ = builder;
1246  context_ = context;
1247  direction_ = direction;
1248  increment_amount_ = increment_amount;
1249
1250  finished_ = false;
1251  header_block_ = builder->CreateLoopHeaderBlock();
1252  body_block_ = NULL;
1253  exit_block_ = NULL;
1254  exit_trampoline_block_ = NULL;
1255}
1256
1257
1258HValue* HGraphBuilder::LoopBuilder::BeginBody(
1259    HValue* initial,
1260    HValue* terminating,
1261    Token::Value token) {
1262  DCHECK(direction_ != kWhileTrue);
1263  HEnvironment* env = builder_->environment();
1264  phi_ = header_block_->AddNewPhi(env->values()->length());
1265  phi_->AddInput(initial);
1266  env->Push(initial);
1267  builder_->GotoNoSimulate(header_block_);
1268
1269  HEnvironment* body_env = env->Copy();
1270  HEnvironment* exit_env = env->Copy();
1271  // Remove the phi from the expression stack
1272  body_env->Pop();
1273  exit_env->Pop();
1274  body_block_ = builder_->CreateBasicBlock(body_env);
1275  exit_block_ = builder_->CreateBasicBlock(exit_env);
1276
1277  builder_->set_current_block(header_block_);
1278  env->Pop();
1279  builder_->FinishCurrentBlock(builder_->New<HCompareNumericAndBranch>(
1280          phi_, terminating, token, body_block_, exit_block_));
1281
1282  builder_->set_current_block(body_block_);
1283  if (direction_ == kPreIncrement || direction_ == kPreDecrement) {
1284    Isolate* isolate = builder_->isolate();
1285    HValue* one = builder_->graph()->GetConstant1();
1286    if (direction_ == kPreIncrement) {
1287      increment_ = HAdd::New(isolate, zone(), context_, phi_, one);
1288    } else {
1289      increment_ = HSub::New(isolate, zone(), context_, phi_, one);
1290    }
1291    increment_->ClearFlag(HValue::kCanOverflow);
1292    builder_->AddInstruction(increment_);
1293    return increment_;
1294  } else {
1295    return phi_;
1296  }
1297}
1298
1299
1300void HGraphBuilder::LoopBuilder::BeginBody(int drop_count) {
1301  DCHECK(direction_ == kWhileTrue);
1302  HEnvironment* env = builder_->environment();
1303  builder_->GotoNoSimulate(header_block_);
1304  builder_->set_current_block(header_block_);
1305  env->Drop(drop_count);
1306}
1307
1308
1309void HGraphBuilder::LoopBuilder::Break() {
1310  if (exit_trampoline_block_ == NULL) {
1311    // Its the first time we saw a break.
1312    if (direction_ == kWhileTrue) {
1313      HEnvironment* env = builder_->environment()->Copy();
1314      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1315    } else {
1316      HEnvironment* env = exit_block_->last_environment()->Copy();
1317      exit_trampoline_block_ = builder_->CreateBasicBlock(env);
1318      builder_->GotoNoSimulate(exit_block_, exit_trampoline_block_);
1319    }
1320  }
1321
1322  builder_->GotoNoSimulate(exit_trampoline_block_);
1323  builder_->set_current_block(NULL);
1324}
1325
1326
1327void HGraphBuilder::LoopBuilder::EndBody() {
1328  DCHECK(!finished_);
1329
1330  if (direction_ == kPostIncrement || direction_ == kPostDecrement) {
1331    Isolate* isolate = builder_->isolate();
1332    if (direction_ == kPostIncrement) {
1333      increment_ =
1334          HAdd::New(isolate, zone(), context_, phi_, increment_amount_);
1335    } else {
1336      increment_ =
1337          HSub::New(isolate, zone(), context_, phi_, increment_amount_);
1338    }
1339    increment_->ClearFlag(HValue::kCanOverflow);
1340    builder_->AddInstruction(increment_);
1341  }
1342
1343  if (direction_ != kWhileTrue) {
1344    // Push the new increment value on the expression stack to merge into
1345    // the phi.
1346    builder_->environment()->Push(increment_);
1347  }
1348  HBasicBlock* last_block = builder_->current_block();
1349  builder_->GotoNoSimulate(last_block, header_block_);
1350  header_block_->loop_information()->RegisterBackEdge(last_block);
1351
1352  if (exit_trampoline_block_ != NULL) {
1353    builder_->set_current_block(exit_trampoline_block_);
1354  } else {
1355    builder_->set_current_block(exit_block_);
1356  }
1357  finished_ = true;
1358}
1359
1360
1361HGraph* HGraphBuilder::CreateGraph() {
1362  DCHECK(!FLAG_minimal);
1363  graph_ = new (zone()) HGraph(info_, descriptor_);
1364  if (FLAG_hydrogen_stats) isolate()->GetHStatistics()->Initialize(info_);
1365  CompilationPhase phase("H_Block building", info_);
1366  set_current_block(graph()->entry_block());
1367  if (!BuildGraph()) return NULL;
1368  graph()->FinalizeUniqueness();
1369  return graph_;
1370}
1371
1372
1373HInstruction* HGraphBuilder::AddInstruction(HInstruction* instr) {
1374  DCHECK(current_block() != NULL);
1375  DCHECK(!FLAG_hydrogen_track_positions || position_.IsKnown() ||
1376         !info_->IsOptimizing());
1377  current_block()->AddInstruction(instr, source_position());
1378  if (graph()->IsInsideNoSideEffectsScope()) {
1379    instr->SetFlag(HValue::kHasNoObservableSideEffects);
1380  }
1381  return instr;
1382}
1383
1384
1385void HGraphBuilder::FinishCurrentBlock(HControlInstruction* last) {
1386  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1387         position_.IsKnown());
1388  current_block()->Finish(last, source_position());
1389  if (last->IsReturn() || last->IsAbnormalExit()) {
1390    set_current_block(NULL);
1391  }
1392}
1393
1394
1395void HGraphBuilder::FinishExitCurrentBlock(HControlInstruction* instruction) {
1396  DCHECK(!FLAG_hydrogen_track_positions || !info_->IsOptimizing() ||
1397         position_.IsKnown());
1398  current_block()->FinishExit(instruction, source_position());
1399  if (instruction->IsReturn() || instruction->IsAbnormalExit()) {
1400    set_current_block(NULL);
1401  }
1402}
1403
1404
1405void HGraphBuilder::AddIncrementCounter(StatsCounter* counter) {
1406  if (FLAG_native_code_counters && counter->Enabled()) {
1407    HValue* reference = Add<HConstant>(ExternalReference(counter));
1408    HValue* old_value =
1409        Add<HLoadNamedField>(reference, nullptr, HObjectAccess::ForCounter());
1410    HValue* new_value = AddUncasted<HAdd>(old_value, graph()->GetConstant1());
1411    new_value->ClearFlag(HValue::kCanOverflow);  // Ignore counter overflow
1412    Add<HStoreNamedField>(reference, HObjectAccess::ForCounter(),
1413                          new_value, STORE_TO_INITIALIZED_ENTRY);
1414  }
1415}
1416
1417
1418void HGraphBuilder::AddSimulate(BailoutId id,
1419                                RemovableSimulate removable) {
1420  DCHECK(current_block() != NULL);
1421  DCHECK(!graph()->IsInsideNoSideEffectsScope());
1422  current_block()->AddNewSimulate(id, source_position(), removable);
1423}
1424
1425
1426HBasicBlock* HGraphBuilder::CreateBasicBlock(HEnvironment* env) {
1427  HBasicBlock* b = graph()->CreateBasicBlock();
1428  b->SetInitialEnvironment(env);
1429  return b;
1430}
1431
1432
1433HBasicBlock* HGraphBuilder::CreateLoopHeaderBlock() {
1434  HBasicBlock* header = graph()->CreateBasicBlock();
1435  HEnvironment* entry_env = environment()->CopyAsLoopHeader(header);
1436  header->SetInitialEnvironment(entry_env);
1437  header->AttachLoopInformation();
1438  return header;
1439}
1440
1441
1442HValue* HGraphBuilder::BuildGetElementsKind(HValue* object) {
1443  HValue* map = Add<HLoadNamedField>(object, nullptr, HObjectAccess::ForMap());
1444
1445  HValue* bit_field2 =
1446      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField2());
1447  return BuildDecodeField<Map::ElementsKindBits>(bit_field2);
1448}
1449
1450
1451HValue* HGraphBuilder::BuildEnumLength(HValue* map) {
1452  NoObservableSideEffectsScope scope(this);
1453  HValue* bit_field3 =
1454      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField3());
1455  return BuildDecodeField<Map::EnumLengthBits>(bit_field3);
1456}
1457
1458
1459HValue* HGraphBuilder::BuildCheckHeapObject(HValue* obj) {
1460  if (obj->type().IsHeapObject()) return obj;
1461  return Add<HCheckHeapObject>(obj);
1462}
1463
1464void HGraphBuilder::FinishExitWithHardDeoptimization(DeoptimizeReason reason) {
1465  Add<HDeoptimize>(reason, Deoptimizer::EAGER);
1466  FinishExitCurrentBlock(New<HAbnormalExit>());
1467}
1468
1469
1470HValue* HGraphBuilder::BuildCheckString(HValue* string) {
1471  if (!string->type().IsString()) {
1472    DCHECK(!string->IsConstant() ||
1473           !HConstant::cast(string)->HasStringValue());
1474    BuildCheckHeapObject(string);
1475    return Add<HCheckInstanceType>(string, HCheckInstanceType::IS_STRING);
1476  }
1477  return string;
1478}
1479
1480HValue* HGraphBuilder::BuildWrapReceiver(HValue* object, HValue* checked) {
1481  if (object->type().IsJSObject()) return object;
1482  HValue* function = checked->ActualValue();
1483  if (function->IsConstant() &&
1484      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
1485    Handle<JSFunction> f = Handle<JSFunction>::cast(
1486        HConstant::cast(function)->handle(isolate()));
1487    SharedFunctionInfo* shared = f->shared();
1488    if (is_strict(shared->language_mode()) || shared->native()) return object;
1489  }
1490  return Add<HWrapReceiver>(object, checked);
1491}
1492
1493
1494HValue* HGraphBuilder::BuildCheckAndGrowElementsCapacity(
1495    HValue* object, HValue* elements, ElementsKind kind, HValue* length,
1496    HValue* capacity, HValue* key) {
1497  HValue* max_gap = Add<HConstant>(static_cast<int32_t>(JSObject::kMaxGap));
1498  HValue* max_capacity = AddUncasted<HAdd>(capacity, max_gap);
1499  Add<HBoundsCheck>(key, max_capacity);
1500
1501  HValue* new_capacity = BuildNewElementsCapacity(key);
1502  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind, kind,
1503                                                   length, new_capacity);
1504  return new_elements;
1505}
1506
1507
1508HValue* HGraphBuilder::BuildCheckForCapacityGrow(
1509    HValue* object,
1510    HValue* elements,
1511    ElementsKind kind,
1512    HValue* length,
1513    HValue* key,
1514    bool is_js_array,
1515    PropertyAccessType access_type) {
1516  IfBuilder length_checker(this);
1517
1518  Token::Value token = IsHoleyElementsKind(kind) ? Token::GTE : Token::EQ;
1519  length_checker.If<HCompareNumericAndBranch>(key, length, token);
1520
1521  length_checker.Then();
1522
1523  HValue* current_capacity = AddLoadFixedArrayLength(elements);
1524
1525  if (top_info()->IsStub()) {
1526    IfBuilder capacity_checker(this);
1527    capacity_checker.If<HCompareNumericAndBranch>(key, current_capacity,
1528                                                  Token::GTE);
1529    capacity_checker.Then();
1530    HValue* new_elements = BuildCheckAndGrowElementsCapacity(
1531        object, elements, kind, length, current_capacity, key);
1532    environment()->Push(new_elements);
1533    capacity_checker.Else();
1534    environment()->Push(elements);
1535    capacity_checker.End();
1536  } else {
1537    HValue* result = Add<HMaybeGrowElements>(
1538        object, elements, key, current_capacity, is_js_array, kind);
1539    environment()->Push(result);
1540  }
1541
1542  if (is_js_array) {
1543    HValue* new_length = AddUncasted<HAdd>(key, graph_->GetConstant1());
1544    new_length->ClearFlag(HValue::kCanOverflow);
1545
1546    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(kind),
1547                          new_length);
1548  }
1549
1550  if (access_type == STORE && kind == FAST_SMI_ELEMENTS) {
1551    HValue* checked_elements = environment()->Top();
1552
1553    // Write zero to ensure that the new element is initialized with some smi.
1554    Add<HStoreKeyed>(checked_elements, key, graph()->GetConstant0(), nullptr,
1555                     kind);
1556  }
1557
1558  length_checker.Else();
1559  Add<HBoundsCheck>(key, length);
1560
1561  environment()->Push(elements);
1562  length_checker.End();
1563
1564  return environment()->Pop();
1565}
1566
1567
1568HValue* HGraphBuilder::BuildCopyElementsOnWrite(HValue* object,
1569                                                HValue* elements,
1570                                                ElementsKind kind,
1571                                                HValue* length) {
1572  Factory* factory = isolate()->factory();
1573
1574  IfBuilder cow_checker(this);
1575
1576  cow_checker.If<HCompareMap>(elements, factory->fixed_cow_array_map());
1577  cow_checker.Then();
1578
1579  HValue* capacity = AddLoadFixedArrayLength(elements);
1580
1581  HValue* new_elements = BuildGrowElementsCapacity(object, elements, kind,
1582                                                   kind, length, capacity);
1583
1584  environment()->Push(new_elements);
1585
1586  cow_checker.Else();
1587
1588  environment()->Push(elements);
1589
1590  cow_checker.End();
1591
1592  return environment()->Pop();
1593}
1594
1595HValue* HGraphBuilder::BuildElementIndexHash(HValue* index) {
1596  int32_t seed_value = static_cast<uint32_t>(isolate()->heap()->HashSeed());
1597  HValue* seed = Add<HConstant>(seed_value);
1598  HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, index, seed);
1599
1600  // hash = ~hash + (hash << 15);
1601  HValue* shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(15));
1602  HValue* not_hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash,
1603                                           graph()->GetConstantMinus1());
1604  hash = AddUncasted<HAdd>(shifted_hash, not_hash);
1605
1606  // hash = hash ^ (hash >> 12);
1607  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(12));
1608  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1609
1610  // hash = hash + (hash << 2);
1611  shifted_hash = AddUncasted<HShl>(hash, Add<HConstant>(2));
1612  hash = AddUncasted<HAdd>(hash, shifted_hash);
1613
1614  // hash = hash ^ (hash >> 4);
1615  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(4));
1616  hash = AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1617
1618  // hash = hash * 2057;
1619  hash = AddUncasted<HMul>(hash, Add<HConstant>(2057));
1620  hash->ClearFlag(HValue::kCanOverflow);
1621
1622  // hash = hash ^ (hash >> 16);
1623  shifted_hash = AddUncasted<HShr>(hash, Add<HConstant>(16));
1624  return AddUncasted<HBitwise>(Token::BIT_XOR, hash, shifted_hash);
1625}
1626
1627HValue* HGraphBuilder::BuildUncheckedDictionaryElementLoad(HValue* receiver,
1628                                                           HValue* elements,
1629                                                           HValue* key,
1630                                                           HValue* hash) {
1631  HValue* capacity =
1632      Add<HLoadKeyed>(elements, Add<HConstant>(NameDictionary::kCapacityIndex),
1633                      nullptr, nullptr, FAST_ELEMENTS);
1634
1635  HValue* mask = AddUncasted<HSub>(capacity, graph()->GetConstant1());
1636  mask->ChangeRepresentation(Representation::Integer32());
1637  mask->ClearFlag(HValue::kCanOverflow);
1638
1639  HValue* entry = hash;
1640  HValue* count = graph()->GetConstant1();
1641  Push(entry);
1642  Push(count);
1643
1644  HIfContinuation return_or_loop_continuation(graph()->CreateBasicBlock(),
1645                                              graph()->CreateBasicBlock());
1646  HIfContinuation found_key_match_continuation(graph()->CreateBasicBlock(),
1647                                               graph()->CreateBasicBlock());
1648  LoopBuilder probe_loop(this);
1649  probe_loop.BeginBody(2);  // Drop entry, count from last environment to
1650                            // appease live range building without simulates.
1651
1652  count = Pop();
1653  entry = Pop();
1654  entry = AddUncasted<HBitwise>(Token::BIT_AND, entry, mask);
1655  int entry_size = SeededNumberDictionary::kEntrySize;
1656  HValue* base_index = AddUncasted<HMul>(entry, Add<HConstant>(entry_size));
1657  base_index->ClearFlag(HValue::kCanOverflow);
1658  int start_offset = SeededNumberDictionary::kElementsStartIndex;
1659  HValue* key_index =
1660      AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset));
1661  key_index->ClearFlag(HValue::kCanOverflow);
1662
1663  HValue* candidate_key =
1664      Add<HLoadKeyed>(elements, key_index, nullptr, nullptr, FAST_ELEMENTS);
1665  IfBuilder if_undefined(this);
1666  if_undefined.If<HCompareObjectEqAndBranch>(candidate_key,
1667                                             graph()->GetConstantUndefined());
1668  if_undefined.Then();
1669  {
1670    // element == undefined means "not found". Call the runtime.
1671    // TODO(jkummerow): walk the prototype chain instead.
1672    Add<HPushArguments>(receiver, key);
1673    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
1674                           2));
1675  }
1676  if_undefined.Else();
1677  {
1678    IfBuilder if_match(this);
1679    if_match.If<HCompareObjectEqAndBranch>(candidate_key, key);
1680    if_match.Then();
1681    if_match.Else();
1682
1683    // Update non-internalized string in the dictionary with internalized key?
1684    IfBuilder if_update_with_internalized(this);
1685    HValue* smi_check =
1686        if_update_with_internalized.IfNot<HIsSmiAndBranch>(candidate_key);
1687    if_update_with_internalized.And();
1688    HValue* map = AddLoadMap(candidate_key, smi_check);
1689    HValue* instance_type =
1690        Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
1691    HValue* not_internalized_bit = AddUncasted<HBitwise>(
1692        Token::BIT_AND, instance_type,
1693        Add<HConstant>(static_cast<int>(kIsNotInternalizedMask)));
1694    if_update_with_internalized.If<HCompareNumericAndBranch>(
1695        not_internalized_bit, graph()->GetConstant0(), Token::NE);
1696    if_update_with_internalized.And();
1697    if_update_with_internalized.IfNot<HCompareObjectEqAndBranch>(
1698        candidate_key, graph()->GetConstantHole());
1699    if_update_with_internalized.AndIf<HStringCompareAndBranch>(candidate_key,
1700                                                               key, Token::EQ);
1701    if_update_with_internalized.Then();
1702    // Replace a key that is a non-internalized string by the equivalent
1703    // internalized string for faster further lookups.
1704    Add<HStoreKeyed>(elements, key_index, key, nullptr, FAST_ELEMENTS);
1705    if_update_with_internalized.Else();
1706
1707    if_update_with_internalized.JoinContinuation(&found_key_match_continuation);
1708    if_match.JoinContinuation(&found_key_match_continuation);
1709
1710    IfBuilder found_key_match(this, &found_key_match_continuation);
1711    found_key_match.Then();
1712    // Key at current probe matches. Relevant bits in the |details| field must
1713    // be zero, otherwise the dictionary element requires special handling.
1714    HValue* details_index =
1715        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 2));
1716    details_index->ClearFlag(HValue::kCanOverflow);
1717    HValue* details = Add<HLoadKeyed>(elements, details_index, nullptr, nullptr,
1718                                      FAST_ELEMENTS);
1719    int details_mask = PropertyDetails::KindField::kMask;
1720    details = AddUncasted<HBitwise>(Token::BIT_AND, details,
1721                                    Add<HConstant>(details_mask));
1722    IfBuilder details_compare(this);
1723    details_compare.If<HCompareNumericAndBranch>(details, New<HConstant>(kData),
1724                                                 Token::EQ);
1725    details_compare.Then();
1726    HValue* result_index =
1727        AddUncasted<HAdd>(base_index, Add<HConstant>(start_offset + 1));
1728    result_index->ClearFlag(HValue::kCanOverflow);
1729    Push(Add<HLoadKeyed>(elements, result_index, nullptr, nullptr,
1730                         FAST_ELEMENTS));
1731    details_compare.Else();
1732    Add<HPushArguments>(receiver, key);
1733    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kKeyedGetProperty),
1734                           2));
1735    details_compare.End();
1736
1737    found_key_match.Else();
1738    found_key_match.JoinContinuation(&return_or_loop_continuation);
1739  }
1740  if_undefined.JoinContinuation(&return_or_loop_continuation);
1741
1742  IfBuilder return_or_loop(this, &return_or_loop_continuation);
1743  return_or_loop.Then();
1744  probe_loop.Break();
1745
1746  return_or_loop.Else();
1747  entry = AddUncasted<HAdd>(entry, count);
1748  entry->ClearFlag(HValue::kCanOverflow);
1749  count = AddUncasted<HAdd>(count, graph()->GetConstant1());
1750  count->ClearFlag(HValue::kCanOverflow);
1751  Push(entry);
1752  Push(count);
1753
1754  probe_loop.EndBody();
1755
1756  return_or_loop.End();
1757
1758  return Pop();
1759}
1760
1761HValue* HGraphBuilder::BuildCreateIterResultObject(HValue* value,
1762                                                   HValue* done) {
1763  NoObservableSideEffectsScope scope(this);
1764
1765  // Allocate the JSIteratorResult object.
1766  HValue* result =
1767      Add<HAllocate>(Add<HConstant>(JSIteratorResult::kSize), HType::JSObject(),
1768                     NOT_TENURED, JS_OBJECT_TYPE, graph()->GetConstant0());
1769
1770  // Initialize the JSIteratorResult object.
1771  HValue* native_context = BuildGetNativeContext();
1772  HValue* map = Add<HLoadNamedField>(
1773      native_context, nullptr,
1774      HObjectAccess::ForContextSlot(Context::ITERATOR_RESULT_MAP_INDEX));
1775  Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
1776  HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
1777  Add<HStoreNamedField>(result, HObjectAccess::ForPropertiesPointer(),
1778                        empty_fixed_array);
1779  Add<HStoreNamedField>(result, HObjectAccess::ForElementsPointer(),
1780                        empty_fixed_array);
1781  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
1782                                    JSIteratorResult::kValueOffset),
1783                        value);
1784  Add<HStoreNamedField>(result, HObjectAccess::ForObservableJSObjectOffset(
1785                                    JSIteratorResult::kDoneOffset),
1786                        done);
1787  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1788  return result;
1789}
1790
1791
1792HValue* HGraphBuilder::BuildNumberToString(HValue* object, AstType* type) {
1793  NoObservableSideEffectsScope scope(this);
1794
1795  // Convert constant numbers at compile time.
1796  if (object->IsConstant() && HConstant::cast(object)->HasNumberValue()) {
1797    Handle<Object> number = HConstant::cast(object)->handle(isolate());
1798    Handle<String> result = isolate()->factory()->NumberToString(number);
1799    return Add<HConstant>(result);
1800  }
1801
1802  // Create a joinable continuation.
1803  HIfContinuation found(graph()->CreateBasicBlock(),
1804                        graph()->CreateBasicBlock());
1805
1806  // Load the number string cache.
1807  HValue* number_string_cache =
1808      Add<HLoadRoot>(Heap::kNumberStringCacheRootIndex);
1809
1810  // Make the hash mask from the length of the number string cache. It
1811  // contains two elements (number and string) for each cache entry.
1812  HValue* mask = AddLoadFixedArrayLength(number_string_cache);
1813  mask->set_type(HType::Smi());
1814  mask = AddUncasted<HSar>(mask, graph()->GetConstant1());
1815  mask = AddUncasted<HSub>(mask, graph()->GetConstant1());
1816
1817  // Check whether object is a smi.
1818  IfBuilder if_objectissmi(this);
1819  if_objectissmi.If<HIsSmiAndBranch>(object);
1820  if_objectissmi.Then();
1821  {
1822    // Compute hash for smi similar to smi_get_hash().
1823    HValue* hash = AddUncasted<HBitwise>(Token::BIT_AND, object, mask);
1824
1825    // Load the key.
1826    HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1827    HValue* key = Add<HLoadKeyed>(number_string_cache, key_index, nullptr,
1828                                  nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1829
1830    // Check if object == key.
1831    IfBuilder if_objectiskey(this);
1832    if_objectiskey.If<HCompareObjectEqAndBranch>(object, key);
1833    if_objectiskey.Then();
1834    {
1835      // Make the key_index available.
1836      Push(key_index);
1837    }
1838    if_objectiskey.JoinContinuation(&found);
1839  }
1840  if_objectissmi.Else();
1841  {
1842    if (type->Is(AstType::SignedSmall())) {
1843      if_objectissmi.Deopt(DeoptimizeReason::kExpectedSmi);
1844    } else {
1845      // Check if the object is a heap number.
1846      IfBuilder if_objectisnumber(this);
1847      HValue* objectisnumber = if_objectisnumber.If<HCompareMap>(
1848          object, isolate()->factory()->heap_number_map());
1849      if_objectisnumber.Then();
1850      {
1851        // Compute hash for heap number similar to double_get_hash().
1852        HValue* low = Add<HLoadNamedField>(
1853            object, objectisnumber,
1854            HObjectAccess::ForHeapNumberValueLowestBits());
1855        HValue* high = Add<HLoadNamedField>(
1856            object, objectisnumber,
1857            HObjectAccess::ForHeapNumberValueHighestBits());
1858        HValue* hash = AddUncasted<HBitwise>(Token::BIT_XOR, low, high);
1859        hash = AddUncasted<HBitwise>(Token::BIT_AND, hash, mask);
1860
1861        // Load the key.
1862        HValue* key_index = AddUncasted<HShl>(hash, graph()->GetConstant1());
1863        HValue* key =
1864            Add<HLoadKeyed>(number_string_cache, key_index, nullptr, nullptr,
1865                            FAST_ELEMENTS, ALLOW_RETURN_HOLE);
1866
1867        // Check if the key is a heap number and compare it with the object.
1868        IfBuilder if_keyisnotsmi(this);
1869        HValue* keyisnotsmi = if_keyisnotsmi.IfNot<HIsSmiAndBranch>(key);
1870        if_keyisnotsmi.Then();
1871        {
1872          IfBuilder if_keyisheapnumber(this);
1873          if_keyisheapnumber.If<HCompareMap>(
1874              key, isolate()->factory()->heap_number_map());
1875          if_keyisheapnumber.Then();
1876          {
1877            // Check if values of key and object match.
1878            IfBuilder if_keyeqobject(this);
1879            if_keyeqobject.If<HCompareNumericAndBranch>(
1880                Add<HLoadNamedField>(key, keyisnotsmi,
1881                                     HObjectAccess::ForHeapNumberValue()),
1882                Add<HLoadNamedField>(object, objectisnumber,
1883                                     HObjectAccess::ForHeapNumberValue()),
1884                Token::EQ);
1885            if_keyeqobject.Then();
1886            {
1887              // Make the key_index available.
1888              Push(key_index);
1889            }
1890            if_keyeqobject.JoinContinuation(&found);
1891          }
1892          if_keyisheapnumber.JoinContinuation(&found);
1893        }
1894        if_keyisnotsmi.JoinContinuation(&found);
1895      }
1896      if_objectisnumber.Else();
1897      {
1898        if (type->Is(AstType::Number())) {
1899          if_objectisnumber.Deopt(DeoptimizeReason::kExpectedHeapNumber);
1900        }
1901      }
1902      if_objectisnumber.JoinContinuation(&found);
1903    }
1904  }
1905  if_objectissmi.JoinContinuation(&found);
1906
1907  // Check for cache hit.
1908  IfBuilder if_found(this, &found);
1909  if_found.Then();
1910  {
1911    // Count number to string operation in native code.
1912    AddIncrementCounter(isolate()->counters()->number_to_string_native());
1913
1914    // Load the value in case of cache hit.
1915    HValue* key_index = Pop();
1916    HValue* value_index = AddUncasted<HAdd>(key_index, graph()->GetConstant1());
1917    Push(Add<HLoadKeyed>(number_string_cache, value_index, nullptr, nullptr,
1918                         FAST_ELEMENTS, ALLOW_RETURN_HOLE));
1919  }
1920  if_found.Else();
1921  {
1922    // Cache miss, fallback to runtime.
1923    Add<HPushArguments>(object);
1924    Push(Add<HCallRuntime>(
1925            Runtime::FunctionForId(Runtime::kNumberToStringSkipCache),
1926            1));
1927  }
1928  if_found.End();
1929
1930  return Pop();
1931}
1932
1933HValue* HGraphBuilder::BuildToNumber(HValue* input) {
1934  if (input->type().IsTaggedNumber() ||
1935      input->representation().IsSpecialization()) {
1936    return input;
1937  }
1938  Callable callable = CodeFactory::ToNumber(isolate());
1939  HValue* stub = Add<HConstant>(callable.code());
1940  HValue* values[] = {input};
1941  HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
1942      stub, 0, callable.descriptor(), ArrayVector(values));
1943  instr->set_type(HType::TaggedNumber());
1944  return instr;
1945}
1946
1947
1948HValue* HGraphBuilder::BuildToObject(HValue* receiver) {
1949  NoObservableSideEffectsScope scope(this);
1950
1951  // Create a joinable continuation.
1952  HIfContinuation wrap(graph()->CreateBasicBlock(),
1953                       graph()->CreateBasicBlock());
1954
1955  // Determine the proper global constructor function required to wrap
1956  // {receiver} into a JSValue, unless {receiver} is already a {JSReceiver}, in
1957  // which case we just return it.  Deopts to Runtime::kToObject if {receiver}
1958  // is undefined or null.
1959  IfBuilder receiver_is_smi(this);
1960  receiver_is_smi.If<HIsSmiAndBranch>(receiver);
1961  receiver_is_smi.Then();
1962  {
1963    // Use global Number function.
1964    Push(Add<HConstant>(Context::NUMBER_FUNCTION_INDEX));
1965  }
1966  receiver_is_smi.Else();
1967  {
1968    // Determine {receiver} map and instance type.
1969    HValue* receiver_map =
1970        Add<HLoadNamedField>(receiver, nullptr, HObjectAccess::ForMap());
1971    HValue* receiver_instance_type = Add<HLoadNamedField>(
1972        receiver_map, nullptr, HObjectAccess::ForMapInstanceType());
1973
1974    // First check whether {receiver} is already a spec object (fast case).
1975    IfBuilder receiver_is_not_spec_object(this);
1976    receiver_is_not_spec_object.If<HCompareNumericAndBranch>(
1977        receiver_instance_type, Add<HConstant>(FIRST_JS_RECEIVER_TYPE),
1978        Token::LT);
1979    receiver_is_not_spec_object.Then();
1980    {
1981      // Load the constructor function index from the {receiver} map.
1982      HValue* constructor_function_index = Add<HLoadNamedField>(
1983          receiver_map, nullptr,
1984          HObjectAccess::ForMapInObjectPropertiesOrConstructorFunctionIndex());
1985
1986      // Check if {receiver} has a constructor (null and undefined have no
1987      // constructors, so we deoptimize to the runtime to throw an exception).
1988      IfBuilder constructor_function_index_is_invalid(this);
1989      constructor_function_index_is_invalid.If<HCompareNumericAndBranch>(
1990          constructor_function_index,
1991          Add<HConstant>(Map::kNoConstructorFunctionIndex), Token::EQ);
1992      constructor_function_index_is_invalid.ThenDeopt(
1993          DeoptimizeReason::kUndefinedOrNullInToObject);
1994      constructor_function_index_is_invalid.End();
1995
1996      // Use the global constructor function.
1997      Push(constructor_function_index);
1998    }
1999    receiver_is_not_spec_object.JoinContinuation(&wrap);
2000  }
2001  receiver_is_smi.JoinContinuation(&wrap);
2002
2003  // Wrap the receiver if necessary.
2004  IfBuilder if_wrap(this, &wrap);
2005  if_wrap.Then();
2006  {
2007    // Grab the constructor function index.
2008    HValue* constructor_index = Pop();
2009
2010    // Load native context.
2011    HValue* native_context = BuildGetNativeContext();
2012
2013    // Determine the initial map for the global constructor.
2014    HValue* constructor = Add<HLoadKeyed>(native_context, constructor_index,
2015                                          nullptr, nullptr, FAST_ELEMENTS);
2016    HValue* constructor_initial_map = Add<HLoadNamedField>(
2017        constructor, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
2018    // Allocate and initialize a JSValue wrapper.
2019    HValue* value =
2020        BuildAllocate(Add<HConstant>(JSValue::kSize), HType::JSObject(),
2021                      JS_VALUE_TYPE, HAllocationMode());
2022    Add<HStoreNamedField>(value, HObjectAccess::ForMap(),
2023                          constructor_initial_map);
2024    HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
2025    Add<HStoreNamedField>(value, HObjectAccess::ForPropertiesPointer(),
2026                          empty_fixed_array);
2027    Add<HStoreNamedField>(value, HObjectAccess::ForElementsPointer(),
2028                          empty_fixed_array);
2029    Add<HStoreNamedField>(value, HObjectAccess::ForObservableJSObjectOffset(
2030                                     JSValue::kValueOffset),
2031                          receiver);
2032    Push(value);
2033  }
2034  if_wrap.Else();
2035  { Push(receiver); }
2036  if_wrap.End();
2037  return Pop();
2038}
2039
2040
2041HAllocate* HGraphBuilder::BuildAllocate(
2042    HValue* object_size,
2043    HType type,
2044    InstanceType instance_type,
2045    HAllocationMode allocation_mode) {
2046  // Compute the effective allocation size.
2047  HValue* size = object_size;
2048  if (allocation_mode.CreateAllocationMementos()) {
2049    size = AddUncasted<HAdd>(size, Add<HConstant>(AllocationMemento::kSize));
2050    size->ClearFlag(HValue::kCanOverflow);
2051  }
2052
2053  // Perform the actual allocation.
2054  HAllocate* object = Add<HAllocate>(
2055      size, type, allocation_mode.GetPretenureMode(), instance_type,
2056      graph()->GetConstant0(), allocation_mode.feedback_site());
2057
2058  // Setup the allocation memento.
2059  if (allocation_mode.CreateAllocationMementos()) {
2060    BuildCreateAllocationMemento(
2061        object, object_size, allocation_mode.current_site());
2062  }
2063
2064  return object;
2065}
2066
2067
2068HValue* HGraphBuilder::BuildAddStringLengths(HValue* left_length,
2069                                             HValue* right_length) {
2070  // Compute the combined string length and check against max string length.
2071  HValue* length = AddUncasted<HAdd>(left_length, right_length);
2072  // Check that length <= kMaxLength <=> length < MaxLength + 1.
2073  HValue* max_length = Add<HConstant>(String::kMaxLength + 1);
2074  if (top_info()->IsStub() || !isolate()->IsStringLengthOverflowIntact()) {
2075    // This is a mitigation for crbug.com/627934; the real fix
2076    // will be to migrate the StringAddStub to TurboFan one day.
2077    IfBuilder if_invalid(this);
2078    if_invalid.If<HCompareNumericAndBranch>(length, max_length, Token::GT);
2079    if_invalid.Then();
2080    {
2081      Add<HCallRuntime>(
2082          Runtime::FunctionForId(Runtime::kThrowInvalidStringLength), 0);
2083    }
2084    if_invalid.End();
2085  } else {
2086    graph()->MarkDependsOnStringLengthOverflow();
2087    Add<HBoundsCheck>(length, max_length);
2088  }
2089  return length;
2090}
2091
2092
2093HValue* HGraphBuilder::BuildCreateConsString(
2094    HValue* length,
2095    HValue* left,
2096    HValue* right,
2097    HAllocationMode allocation_mode) {
2098  // Determine the string instance types.
2099  HInstruction* left_instance_type = AddLoadStringInstanceType(left);
2100  HInstruction* right_instance_type = AddLoadStringInstanceType(right);
2101
2102  // Allocate the cons string object. HAllocate does not care whether we
2103  // pass CONS_STRING_TYPE or CONS_ONE_BYTE_STRING_TYPE here, so we just use
2104  // CONS_STRING_TYPE here. Below we decide whether the cons string is
2105  // one-byte or two-byte and set the appropriate map.
2106  DCHECK(HAllocate::CompatibleInstanceTypes(CONS_STRING_TYPE,
2107                                            CONS_ONE_BYTE_STRING_TYPE));
2108  HAllocate* result = BuildAllocate(Add<HConstant>(ConsString::kSize),
2109                                    HType::String(), CONS_STRING_TYPE,
2110                                    allocation_mode);
2111
2112  // Compute intersection and difference of instance types.
2113  HValue* anded_instance_types = AddUncasted<HBitwise>(
2114      Token::BIT_AND, left_instance_type, right_instance_type);
2115  HValue* xored_instance_types = AddUncasted<HBitwise>(
2116      Token::BIT_XOR, left_instance_type, right_instance_type);
2117
2118  // We create a one-byte cons string if
2119  // 1. both strings are one-byte, or
2120  // 2. at least one of the strings is two-byte, but happens to contain only
2121  //    one-byte characters.
2122  // To do this, we check
2123  // 1. if both strings are one-byte, or if the one-byte data hint is set in
2124  //    both strings, or
2125  // 2. if one of the strings has the one-byte data hint set and the other
2126  //    string is one-byte.
2127  IfBuilder if_onebyte(this);
2128  STATIC_ASSERT(kOneByteStringTag != 0);
2129  STATIC_ASSERT(kOneByteDataHintMask != 0);
2130  if_onebyte.If<HCompareNumericAndBranch>(
2131      AddUncasted<HBitwise>(
2132          Token::BIT_AND, anded_instance_types,
2133          Add<HConstant>(static_cast<int32_t>(
2134                  kStringEncodingMask | kOneByteDataHintMask))),
2135      graph()->GetConstant0(), Token::NE);
2136  if_onebyte.Or();
2137  STATIC_ASSERT(kOneByteStringTag != 0 &&
2138                kOneByteDataHintTag != 0 &&
2139                kOneByteDataHintTag != kOneByteStringTag);
2140  if_onebyte.If<HCompareNumericAndBranch>(
2141      AddUncasted<HBitwise>(
2142          Token::BIT_AND, xored_instance_types,
2143          Add<HConstant>(static_cast<int32_t>(
2144                  kOneByteStringTag | kOneByteDataHintTag))),
2145      Add<HConstant>(static_cast<int32_t>(
2146              kOneByteStringTag | kOneByteDataHintTag)), Token::EQ);
2147  if_onebyte.Then();
2148  {
2149    // We can safely skip the write barrier for storing the map here.
2150    Add<HStoreNamedField>(
2151        result, HObjectAccess::ForMap(),
2152        Add<HConstant>(isolate()->factory()->cons_one_byte_string_map()));
2153  }
2154  if_onebyte.Else();
2155  {
2156    // We can safely skip the write barrier for storing the map here.
2157    Add<HStoreNamedField>(
2158        result, HObjectAccess::ForMap(),
2159        Add<HConstant>(isolate()->factory()->cons_string_map()));
2160  }
2161  if_onebyte.End();
2162
2163  // Initialize the cons string fields.
2164  Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2165                        Add<HConstant>(String::kEmptyHashField));
2166  Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2167  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringFirst(), left);
2168  Add<HStoreNamedField>(result, HObjectAccess::ForConsStringSecond(), right);
2169
2170  // Count the native string addition.
2171  AddIncrementCounter(isolate()->counters()->string_add_native());
2172
2173  return result;
2174}
2175
2176
2177void HGraphBuilder::BuildCopySeqStringChars(HValue* src,
2178                                            HValue* src_offset,
2179                                            String::Encoding src_encoding,
2180                                            HValue* dst,
2181                                            HValue* dst_offset,
2182                                            String::Encoding dst_encoding,
2183                                            HValue* length) {
2184  DCHECK(dst_encoding != String::ONE_BYTE_ENCODING ||
2185         src_encoding == String::ONE_BYTE_ENCODING);
2186  LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
2187  HValue* index = loop.BeginBody(graph()->GetConstant0(), length, Token::LT);
2188  {
2189    HValue* src_index = AddUncasted<HAdd>(src_offset, index);
2190    HValue* value =
2191        AddUncasted<HSeqStringGetChar>(src_encoding, src, src_index);
2192    HValue* dst_index = AddUncasted<HAdd>(dst_offset, index);
2193    Add<HSeqStringSetChar>(dst_encoding, dst, dst_index, value);
2194  }
2195  loop.EndBody();
2196}
2197
2198
2199HValue* HGraphBuilder::BuildObjectSizeAlignment(
2200    HValue* unaligned_size, int header_size) {
2201  DCHECK((header_size & kObjectAlignmentMask) == 0);
2202  HValue* size = AddUncasted<HAdd>(
2203      unaligned_size, Add<HConstant>(static_cast<int32_t>(
2204          header_size + kObjectAlignmentMask)));
2205  size->ClearFlag(HValue::kCanOverflow);
2206  return AddUncasted<HBitwise>(
2207      Token::BIT_AND, size, Add<HConstant>(static_cast<int32_t>(
2208          ~kObjectAlignmentMask)));
2209}
2210
2211
2212HValue* HGraphBuilder::BuildUncheckedStringAdd(
2213    HValue* left,
2214    HValue* right,
2215    HAllocationMode allocation_mode) {
2216  // Determine the string lengths.
2217  HValue* left_length = AddLoadStringLength(left);
2218  HValue* right_length = AddLoadStringLength(right);
2219
2220  // Compute the combined string length.
2221  HValue* length = BuildAddStringLengths(left_length, right_length);
2222
2223  // Do some manual constant folding here.
2224  if (left_length->IsConstant()) {
2225    HConstant* c_left_length = HConstant::cast(left_length);
2226    DCHECK_NE(0, c_left_length->Integer32Value());
2227    if (c_left_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2228      // The right string contains at least one character.
2229      return BuildCreateConsString(length, left, right, allocation_mode);
2230    }
2231  } else if (right_length->IsConstant()) {
2232    HConstant* c_right_length = HConstant::cast(right_length);
2233    DCHECK_NE(0, c_right_length->Integer32Value());
2234    if (c_right_length->Integer32Value() + 1 >= ConsString::kMinLength) {
2235      // The left string contains at least one character.
2236      return BuildCreateConsString(length, left, right, allocation_mode);
2237    }
2238  }
2239
2240  // Check if we should create a cons string.
2241  IfBuilder if_createcons(this);
2242  if_createcons.If<HCompareNumericAndBranch>(
2243      length, Add<HConstant>(ConsString::kMinLength), Token::GTE);
2244  if_createcons.And();
2245  if_createcons.If<HCompareNumericAndBranch>(
2246      length, Add<HConstant>(ConsString::kMaxLength), Token::LTE);
2247  if_createcons.Then();
2248  {
2249    // Create a cons string.
2250    Push(BuildCreateConsString(length, left, right, allocation_mode));
2251  }
2252  if_createcons.Else();
2253  {
2254    // Determine the string instance types.
2255    HValue* left_instance_type = AddLoadStringInstanceType(left);
2256    HValue* right_instance_type = AddLoadStringInstanceType(right);
2257
2258    // Compute union and difference of instance types.
2259    HValue* ored_instance_types = AddUncasted<HBitwise>(
2260        Token::BIT_OR, left_instance_type, right_instance_type);
2261    HValue* xored_instance_types = AddUncasted<HBitwise>(
2262        Token::BIT_XOR, left_instance_type, right_instance_type);
2263
2264    // Check if both strings have the same encoding and both are
2265    // sequential.
2266    IfBuilder if_sameencodingandsequential(this);
2267    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2268        AddUncasted<HBitwise>(
2269            Token::BIT_AND, xored_instance_types,
2270            Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2271        graph()->GetConstant0(), Token::EQ);
2272    if_sameencodingandsequential.And();
2273    STATIC_ASSERT(kSeqStringTag == 0);
2274    if_sameencodingandsequential.If<HCompareNumericAndBranch>(
2275        AddUncasted<HBitwise>(
2276            Token::BIT_AND, ored_instance_types,
2277            Add<HConstant>(static_cast<int32_t>(kStringRepresentationMask))),
2278        graph()->GetConstant0(), Token::EQ);
2279    if_sameencodingandsequential.Then();
2280    {
2281      HConstant* string_map =
2282          Add<HConstant>(isolate()->factory()->string_map());
2283      HConstant* one_byte_string_map =
2284          Add<HConstant>(isolate()->factory()->one_byte_string_map());
2285
2286      // Determine map and size depending on whether result is one-byte string.
2287      IfBuilder if_onebyte(this);
2288      STATIC_ASSERT(kOneByteStringTag != 0);
2289      if_onebyte.If<HCompareNumericAndBranch>(
2290          AddUncasted<HBitwise>(
2291              Token::BIT_AND, ored_instance_types,
2292              Add<HConstant>(static_cast<int32_t>(kStringEncodingMask))),
2293          graph()->GetConstant0(), Token::NE);
2294      if_onebyte.Then();
2295      {
2296        // Allocate sequential one-byte string object.
2297        Push(length);
2298        Push(one_byte_string_map);
2299      }
2300      if_onebyte.Else();
2301      {
2302        // Allocate sequential two-byte string object.
2303        HValue* size = AddUncasted<HShl>(length, graph()->GetConstant1());
2304        size->ClearFlag(HValue::kCanOverflow);
2305        size->SetFlag(HValue::kUint32);
2306        Push(size);
2307        Push(string_map);
2308      }
2309      if_onebyte.End();
2310      HValue* map = Pop();
2311
2312      // Calculate the number of bytes needed for the characters in the
2313      // string while observing object alignment.
2314      STATIC_ASSERT((SeqString::kHeaderSize & kObjectAlignmentMask) == 0);
2315      HValue* size = BuildObjectSizeAlignment(Pop(), SeqString::kHeaderSize);
2316
2317      IfBuilder if_size(this);
2318      if_size.If<HCompareNumericAndBranch>(
2319          size, Add<HConstant>(kMaxRegularHeapObjectSize), Token::LT);
2320      if_size.Then();
2321      {
2322        // Allocate the string object. HAllocate does not care whether we pass
2323        // STRING_TYPE or ONE_BYTE_STRING_TYPE here, so we just use STRING_TYPE.
2324        HAllocate* result =
2325            BuildAllocate(size, HType::String(), STRING_TYPE, allocation_mode);
2326        Add<HStoreNamedField>(result, HObjectAccess::ForMap(), map);
2327
2328        // Initialize the string fields.
2329        Add<HStoreNamedField>(result, HObjectAccess::ForStringHashField(),
2330                              Add<HConstant>(String::kEmptyHashField));
2331        Add<HStoreNamedField>(result, HObjectAccess::ForStringLength(), length);
2332
2333        // Copy characters to the result string.
2334        IfBuilder if_twobyte(this);
2335        if_twobyte.If<HCompareObjectEqAndBranch>(map, string_map);
2336        if_twobyte.Then();
2337        {
2338          // Copy characters from the left string.
2339          BuildCopySeqStringChars(
2340              left, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
2341              graph()->GetConstant0(), String::TWO_BYTE_ENCODING, left_length);
2342
2343          // Copy characters from the right string.
2344          BuildCopySeqStringChars(
2345              right, graph()->GetConstant0(), String::TWO_BYTE_ENCODING, result,
2346              left_length, String::TWO_BYTE_ENCODING, right_length);
2347        }
2348        if_twobyte.Else();
2349        {
2350          // Copy characters from the left string.
2351          BuildCopySeqStringChars(
2352              left, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
2353              graph()->GetConstant0(), String::ONE_BYTE_ENCODING, left_length);
2354
2355          // Copy characters from the right string.
2356          BuildCopySeqStringChars(
2357              right, graph()->GetConstant0(), String::ONE_BYTE_ENCODING, result,
2358              left_length, String::ONE_BYTE_ENCODING, right_length);
2359        }
2360        if_twobyte.End();
2361
2362        // Count the native string addition.
2363        AddIncrementCounter(isolate()->counters()->string_add_native());
2364
2365        // Return the sequential string.
2366        Push(result);
2367      }
2368      if_size.Else();
2369      {
2370        // Fallback to the runtime to add the two strings. The string has to be
2371        // allocated in LO space.
2372        Add<HPushArguments>(left, right);
2373        Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
2374      }
2375      if_size.End();
2376    }
2377    if_sameencodingandsequential.Else();
2378    {
2379      // Fallback to the runtime to add the two strings.
2380      Add<HPushArguments>(left, right);
2381      Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kStringAdd), 2));
2382    }
2383    if_sameencodingandsequential.End();
2384  }
2385  if_createcons.End();
2386
2387  return Pop();
2388}
2389
2390
2391HValue* HGraphBuilder::BuildStringAdd(
2392    HValue* left,
2393    HValue* right,
2394    HAllocationMode allocation_mode) {
2395  NoObservableSideEffectsScope no_effects(this);
2396
2397  // Determine string lengths.
2398  HValue* left_length = AddLoadStringLength(left);
2399  HValue* right_length = AddLoadStringLength(right);
2400
2401  // Check if left string is empty.
2402  IfBuilder if_leftempty(this);
2403  if_leftempty.If<HCompareNumericAndBranch>(
2404      left_length, graph()->GetConstant0(), Token::EQ);
2405  if_leftempty.Then();
2406  {
2407    // Count the native string addition.
2408    AddIncrementCounter(isolate()->counters()->string_add_native());
2409
2410    // Just return the right string.
2411    Push(right);
2412  }
2413  if_leftempty.Else();
2414  {
2415    // Check if right string is empty.
2416    IfBuilder if_rightempty(this);
2417    if_rightempty.If<HCompareNumericAndBranch>(
2418        right_length, graph()->GetConstant0(), Token::EQ);
2419    if_rightempty.Then();
2420    {
2421      // Count the native string addition.
2422      AddIncrementCounter(isolate()->counters()->string_add_native());
2423
2424      // Just return the left string.
2425      Push(left);
2426    }
2427    if_rightempty.Else();
2428    {
2429      // Add the two non-empty strings.
2430      Push(BuildUncheckedStringAdd(left, right, allocation_mode));
2431    }
2432    if_rightempty.End();
2433  }
2434  if_leftempty.End();
2435
2436  return Pop();
2437}
2438
2439
2440HInstruction* HGraphBuilder::BuildUncheckedMonomorphicElementAccess(
2441    HValue* checked_object,
2442    HValue* key,
2443    HValue* val,
2444    bool is_js_array,
2445    ElementsKind elements_kind,
2446    PropertyAccessType access_type,
2447    LoadKeyedHoleMode load_mode,
2448    KeyedAccessStoreMode store_mode) {
2449  DCHECK(top_info()->IsStub() || checked_object->IsCompareMap() ||
2450         checked_object->IsCheckMaps());
2451  DCHECK(!IsFixedTypedArrayElementsKind(elements_kind) || !is_js_array);
2452  // No GVNFlag is necessary for ElementsKind if there is an explicit dependency
2453  // on a HElementsTransition instruction. The flag can also be removed if the
2454  // map to check has FAST_HOLEY_ELEMENTS, since there can be no further
2455  // ElementsKind transitions. Finally, the dependency can be removed for stores
2456  // for FAST_ELEMENTS, since a transition to HOLEY elements won't change the
2457  // generated store code.
2458  if ((elements_kind == FAST_HOLEY_ELEMENTS) ||
2459      (elements_kind == FAST_ELEMENTS && access_type == STORE)) {
2460    checked_object->ClearDependsOnFlag(kElementsKind);
2461  }
2462
2463  bool fast_smi_only_elements = IsFastSmiElementsKind(elements_kind);
2464  bool fast_elements = IsFastObjectElementsKind(elements_kind);
2465  HValue* elements = AddLoadElements(checked_object);
2466  if (access_type == STORE && (fast_elements || fast_smi_only_elements) &&
2467      store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
2468    HCheckMaps* check_cow_map = Add<HCheckMaps>(
2469        elements, isolate()->factory()->fixed_array_map());
2470    check_cow_map->ClearDependsOnFlag(kElementsKind);
2471  }
2472  HInstruction* length = NULL;
2473  if (is_js_array) {
2474    length = Add<HLoadNamedField>(
2475        checked_object->ActualValue(), checked_object,
2476        HObjectAccess::ForArrayLength(elements_kind));
2477  } else {
2478    length = AddLoadFixedArrayLength(elements);
2479  }
2480  length->set_type(HType::Smi());
2481  HValue* checked_key = NULL;
2482  if (IsFixedTypedArrayElementsKind(elements_kind)) {
2483    checked_object = Add<HCheckArrayBufferNotNeutered>(checked_object);
2484
2485    HValue* external_pointer = Add<HLoadNamedField>(
2486        elements, nullptr,
2487        HObjectAccess::ForFixedTypedArrayBaseExternalPointer());
2488    HValue* base_pointer = Add<HLoadNamedField>(
2489        elements, nullptr, HObjectAccess::ForFixedTypedArrayBaseBasePointer());
2490    HValue* backing_store = AddUncasted<HAdd>(external_pointer, base_pointer,
2491                                              AddOfExternalAndTagged);
2492
2493    if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
2494      NoObservableSideEffectsScope no_effects(this);
2495      IfBuilder length_checker(this);
2496      length_checker.If<HCompareNumericAndBranch>(key, length, Token::LT);
2497      length_checker.Then();
2498      IfBuilder negative_checker(this);
2499      HValue* bounds_check = negative_checker.If<HCompareNumericAndBranch>(
2500          key, graph()->GetConstant0(), Token::GTE);
2501      negative_checker.Then();
2502      HInstruction* result = AddElementAccess(
2503          backing_store, key, val, bounds_check, checked_object->ActualValue(),
2504          elements_kind, access_type);
2505      negative_checker.ElseDeopt(DeoptimizeReason::kNegativeKeyEncountered);
2506      negative_checker.End();
2507      length_checker.End();
2508      return result;
2509    } else {
2510      DCHECK(store_mode == STANDARD_STORE);
2511      checked_key = Add<HBoundsCheck>(key, length);
2512      return AddElementAccess(backing_store, checked_key, val, checked_object,
2513                              checked_object->ActualValue(), elements_kind,
2514                              access_type);
2515    }
2516  }
2517  DCHECK(fast_smi_only_elements ||
2518         fast_elements ||
2519         IsFastDoubleElementsKind(elements_kind));
2520
2521  // In case val is stored into a fast smi array, assure that the value is a smi
2522  // before manipulating the backing store. Otherwise the actual store may
2523  // deopt, leaving the backing store in an invalid state.
2524  if (access_type == STORE && IsFastSmiElementsKind(elements_kind) &&
2525      !val->type().IsSmi()) {
2526    val = AddUncasted<HForceRepresentation>(val, Representation::Smi());
2527  }
2528
2529  if (IsGrowStoreMode(store_mode)) {
2530    NoObservableSideEffectsScope no_effects(this);
2531    Representation representation = HStoreKeyed::RequiredValueRepresentation(
2532        elements_kind, STORE_TO_INITIALIZED_ENTRY);
2533    val = AddUncasted<HForceRepresentation>(val, representation);
2534    elements = BuildCheckForCapacityGrow(checked_object, elements,
2535                                         elements_kind, length, key,
2536                                         is_js_array, access_type);
2537    checked_key = key;
2538  } else {
2539    checked_key = Add<HBoundsCheck>(key, length);
2540
2541    if (access_type == STORE && (fast_elements || fast_smi_only_elements)) {
2542      if (store_mode == STORE_NO_TRANSITION_HANDLE_COW) {
2543        NoObservableSideEffectsScope no_effects(this);
2544        elements = BuildCopyElementsOnWrite(checked_object, elements,
2545                                            elements_kind, length);
2546      } else {
2547        HCheckMaps* check_cow_map = Add<HCheckMaps>(
2548            elements, isolate()->factory()->fixed_array_map());
2549        check_cow_map->ClearDependsOnFlag(kElementsKind);
2550      }
2551    }
2552  }
2553  return AddElementAccess(elements, checked_key, val, checked_object, nullptr,
2554                          elements_kind, access_type, load_mode);
2555}
2556
2557
2558HValue* HGraphBuilder::BuildCalculateElementsSize(ElementsKind kind,
2559                                                  HValue* capacity) {
2560  int elements_size = IsFastDoubleElementsKind(kind)
2561      ? kDoubleSize
2562      : kPointerSize;
2563
2564  HConstant* elements_size_value = Add<HConstant>(elements_size);
2565  HInstruction* mul =
2566      HMul::NewImul(isolate(), zone(), context(), capacity->ActualValue(),
2567                    elements_size_value);
2568  AddInstruction(mul);
2569  mul->ClearFlag(HValue::kCanOverflow);
2570
2571  STATIC_ASSERT(FixedDoubleArray::kHeaderSize == FixedArray::kHeaderSize);
2572
2573  HConstant* header_size = Add<HConstant>(FixedArray::kHeaderSize);
2574  HValue* total_size = AddUncasted<HAdd>(mul, header_size);
2575  total_size->ClearFlag(HValue::kCanOverflow);
2576  return total_size;
2577}
2578
2579
2580HAllocate* HGraphBuilder::AllocateJSArrayObject(AllocationSiteMode mode) {
2581  int base_size = JSArray::kSize;
2582  if (mode == TRACK_ALLOCATION_SITE) {
2583    base_size += AllocationMemento::kSize;
2584  }
2585  HConstant* size_in_bytes = Add<HConstant>(base_size);
2586  return Add<HAllocate>(size_in_bytes, HType::JSArray(), NOT_TENURED,
2587                        JS_OBJECT_TYPE, graph()->GetConstant0());
2588}
2589
2590
2591HConstant* HGraphBuilder::EstablishElementsAllocationSize(
2592    ElementsKind kind,
2593    int capacity) {
2594  int base_size = IsFastDoubleElementsKind(kind)
2595      ? FixedDoubleArray::SizeFor(capacity)
2596      : FixedArray::SizeFor(capacity);
2597
2598  return Add<HConstant>(base_size);
2599}
2600
2601
2602HAllocate* HGraphBuilder::BuildAllocateElements(ElementsKind kind,
2603                                                HValue* size_in_bytes) {
2604  InstanceType instance_type = IsFastDoubleElementsKind(kind)
2605      ? FIXED_DOUBLE_ARRAY_TYPE
2606      : FIXED_ARRAY_TYPE;
2607
2608  return Add<HAllocate>(size_in_bytes, HType::HeapObject(), NOT_TENURED,
2609                        instance_type, graph()->GetConstant0());
2610}
2611
2612
2613void HGraphBuilder::BuildInitializeElementsHeader(HValue* elements,
2614                                                  ElementsKind kind,
2615                                                  HValue* capacity) {
2616  Factory* factory = isolate()->factory();
2617  Handle<Map> map = IsFastDoubleElementsKind(kind)
2618      ? factory->fixed_double_array_map()
2619      : factory->fixed_array_map();
2620
2621  Add<HStoreNamedField>(elements, HObjectAccess::ForMap(), Add<HConstant>(map));
2622  Add<HStoreNamedField>(elements, HObjectAccess::ForFixedArrayLength(),
2623                        capacity);
2624}
2625
2626
2627HValue* HGraphBuilder::BuildAllocateAndInitializeArray(ElementsKind kind,
2628                                                       HValue* capacity) {
2629  // The HForceRepresentation is to prevent possible deopt on int-smi
2630  // conversion after allocation but before the new object fields are set.
2631  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
2632  HValue* size_in_bytes = BuildCalculateElementsSize(kind, capacity);
2633  HValue* new_array = BuildAllocateElements(kind, size_in_bytes);
2634  BuildInitializeElementsHeader(new_array, kind, capacity);
2635  return new_array;
2636}
2637
2638
2639void HGraphBuilder::BuildJSArrayHeader(HValue* array,
2640                                       HValue* array_map,
2641                                       HValue* elements,
2642                                       AllocationSiteMode mode,
2643                                       ElementsKind elements_kind,
2644                                       HValue* allocation_site_payload,
2645                                       HValue* length_field) {
2646  Add<HStoreNamedField>(array, HObjectAccess::ForMap(), array_map);
2647
2648  HValue* empty_fixed_array = Add<HLoadRoot>(Heap::kEmptyFixedArrayRootIndex);
2649
2650  Add<HStoreNamedField>(
2651      array, HObjectAccess::ForPropertiesPointer(), empty_fixed_array);
2652
2653  Add<HStoreNamedField>(array, HObjectAccess::ForElementsPointer(),
2654                        elements != nullptr ? elements : empty_fixed_array);
2655
2656  Add<HStoreNamedField>(
2657      array, HObjectAccess::ForArrayLength(elements_kind), length_field);
2658
2659  if (mode == TRACK_ALLOCATION_SITE) {
2660    BuildCreateAllocationMemento(
2661        array, Add<HConstant>(JSArray::kSize), allocation_site_payload);
2662  }
2663}
2664
2665
2666HInstruction* HGraphBuilder::AddElementAccess(
2667    HValue* elements, HValue* checked_key, HValue* val, HValue* dependency,
2668    HValue* backing_store_owner, ElementsKind elements_kind,
2669    PropertyAccessType access_type, LoadKeyedHoleMode load_mode) {
2670  if (access_type == STORE) {
2671    DCHECK(val != NULL);
2672    if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
2673      val = Add<HClampToUint8>(val);
2674    }
2675    return Add<HStoreKeyed>(elements, checked_key, val, backing_store_owner,
2676                            elements_kind, STORE_TO_INITIALIZED_ENTRY);
2677  }
2678
2679  DCHECK(access_type == LOAD);
2680  DCHECK(val == NULL);
2681  HLoadKeyed* load =
2682      Add<HLoadKeyed>(elements, checked_key, dependency, backing_store_owner,
2683                      elements_kind, load_mode);
2684  if (elements_kind == UINT32_ELEMENTS) {
2685    graph()->RecordUint32Instruction(load);
2686  }
2687  return load;
2688}
2689
2690
2691HLoadNamedField* HGraphBuilder::AddLoadMap(HValue* object,
2692                                           HValue* dependency) {
2693  return Add<HLoadNamedField>(object, dependency, HObjectAccess::ForMap());
2694}
2695
2696
2697HLoadNamedField* HGraphBuilder::AddLoadElements(HValue* object,
2698                                                HValue* dependency) {
2699  return Add<HLoadNamedField>(
2700      object, dependency, HObjectAccess::ForElementsPointer());
2701}
2702
2703
2704HLoadNamedField* HGraphBuilder::AddLoadFixedArrayLength(
2705    HValue* array,
2706    HValue* dependency) {
2707  return Add<HLoadNamedField>(
2708      array, dependency, HObjectAccess::ForFixedArrayLength());
2709}
2710
2711
2712HLoadNamedField* HGraphBuilder::AddLoadArrayLength(HValue* array,
2713                                                   ElementsKind kind,
2714                                                   HValue* dependency) {
2715  return Add<HLoadNamedField>(
2716      array, dependency, HObjectAccess::ForArrayLength(kind));
2717}
2718
2719
2720HValue* HGraphBuilder::BuildNewElementsCapacity(HValue* old_capacity) {
2721  HValue* half_old_capacity = AddUncasted<HShr>(old_capacity,
2722                                                graph_->GetConstant1());
2723
2724  HValue* new_capacity = AddUncasted<HAdd>(half_old_capacity, old_capacity);
2725  new_capacity->ClearFlag(HValue::kCanOverflow);
2726
2727  HValue* min_growth = Add<HConstant>(16);
2728
2729  new_capacity = AddUncasted<HAdd>(new_capacity, min_growth);
2730  new_capacity->ClearFlag(HValue::kCanOverflow);
2731
2732  return new_capacity;
2733}
2734
2735
2736HValue* HGraphBuilder::BuildGrowElementsCapacity(HValue* object,
2737                                                 HValue* elements,
2738                                                 ElementsKind kind,
2739                                                 ElementsKind new_kind,
2740                                                 HValue* length,
2741                                                 HValue* new_capacity) {
2742  Add<HBoundsCheck>(
2743      new_capacity,
2744      Add<HConstant>((kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
2745                     ElementsKindToShiftSize(new_kind)));
2746
2747  HValue* new_elements =
2748      BuildAllocateAndInitializeArray(new_kind, new_capacity);
2749
2750  BuildCopyElements(elements, kind, new_elements,
2751                    new_kind, length, new_capacity);
2752
2753  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
2754                        new_elements);
2755
2756  return new_elements;
2757}
2758
2759
2760void HGraphBuilder::BuildFillElementsWithValue(HValue* elements,
2761                                               ElementsKind elements_kind,
2762                                               HValue* from,
2763                                               HValue* to,
2764                                               HValue* value) {
2765  if (to == NULL) {
2766    to = AddLoadFixedArrayLength(elements);
2767  }
2768
2769  // Special loop unfolding case
2770  STATIC_ASSERT(JSArray::kPreallocatedArrayElements <=
2771                kElementLoopUnrollThreshold);
2772  int initial_capacity = -1;
2773  if (from->IsInteger32Constant() && to->IsInteger32Constant()) {
2774    int constant_from = from->GetInteger32Constant();
2775    int constant_to = to->GetInteger32Constant();
2776
2777    if (constant_from == 0 && constant_to <= kElementLoopUnrollThreshold) {
2778      initial_capacity = constant_to;
2779    }
2780  }
2781
2782  if (initial_capacity >= 0) {
2783    for (int i = 0; i < initial_capacity; i++) {
2784      HInstruction* key = Add<HConstant>(i);
2785      Add<HStoreKeyed>(elements, key, value, nullptr, elements_kind);
2786    }
2787  } else {
2788    // Carefully loop backwards so that the "from" remains live through the loop
2789    // rather than the to. This often corresponds to keeping length live rather
2790    // then capacity, which helps register allocation, since length is used more
2791    // other than capacity after filling with holes.
2792    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2793
2794    HValue* key = builder.BeginBody(to, from, Token::GT);
2795
2796    HValue* adjusted_key = AddUncasted<HSub>(key, graph()->GetConstant1());
2797    adjusted_key->ClearFlag(HValue::kCanOverflow);
2798
2799    Add<HStoreKeyed>(elements, adjusted_key, value, nullptr, elements_kind);
2800
2801    builder.EndBody();
2802  }
2803}
2804
2805
2806void HGraphBuilder::BuildFillElementsWithHole(HValue* elements,
2807                                              ElementsKind elements_kind,
2808                                              HValue* from,
2809                                              HValue* to) {
2810  // Fast elements kinds need to be initialized in case statements below cause a
2811  // garbage collection.
2812
2813  HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
2814                     ? graph()->GetConstantHole()
2815                     : Add<HConstant>(HConstant::kHoleNaN);
2816
2817  // Since we're about to store a hole value, the store instruction below must
2818  // assume an elements kind that supports heap object values.
2819  if (IsFastSmiOrObjectElementsKind(elements_kind)) {
2820    elements_kind = FAST_HOLEY_ELEMENTS;
2821  }
2822
2823  BuildFillElementsWithValue(elements, elements_kind, from, to, hole);
2824}
2825
2826
2827void HGraphBuilder::BuildCopyProperties(HValue* from_properties,
2828                                        HValue* to_properties, HValue* length,
2829                                        HValue* capacity) {
2830  ElementsKind kind = FAST_ELEMENTS;
2831
2832  BuildFillElementsWithValue(to_properties, kind, length, capacity,
2833                             graph()->GetConstantUndefined());
2834
2835  LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2836
2837  HValue* key = builder.BeginBody(length, graph()->GetConstant0(), Token::GT);
2838
2839  key = AddUncasted<HSub>(key, graph()->GetConstant1());
2840  key->ClearFlag(HValue::kCanOverflow);
2841
2842  HValue* element =
2843      Add<HLoadKeyed>(from_properties, key, nullptr, nullptr, kind);
2844
2845  Add<HStoreKeyed>(to_properties, key, element, nullptr, kind);
2846
2847  builder.EndBody();
2848}
2849
2850
2851void HGraphBuilder::BuildCopyElements(HValue* from_elements,
2852                                      ElementsKind from_elements_kind,
2853                                      HValue* to_elements,
2854                                      ElementsKind to_elements_kind,
2855                                      HValue* length,
2856                                      HValue* capacity) {
2857  int constant_capacity = -1;
2858  if (capacity != NULL &&
2859      capacity->IsConstant() &&
2860      HConstant::cast(capacity)->HasInteger32Value()) {
2861    int constant_candidate = HConstant::cast(capacity)->Integer32Value();
2862    if (constant_candidate <= kElementLoopUnrollThreshold) {
2863      constant_capacity = constant_candidate;
2864    }
2865  }
2866
2867  bool pre_fill_with_holes =
2868    IsFastDoubleElementsKind(from_elements_kind) &&
2869    IsFastObjectElementsKind(to_elements_kind);
2870  if (pre_fill_with_holes) {
2871    // If the copy might trigger a GC, make sure that the FixedArray is
2872    // pre-initialized with holes to make sure that it's always in a
2873    // consistent state.
2874    BuildFillElementsWithHole(to_elements, to_elements_kind,
2875                              graph()->GetConstant0(), NULL);
2876  }
2877
2878  if (constant_capacity != -1) {
2879    // Unroll the loop for small elements kinds.
2880    for (int i = 0; i < constant_capacity; i++) {
2881      HValue* key_constant = Add<HConstant>(i);
2882      HInstruction* value = Add<HLoadKeyed>(
2883          from_elements, key_constant, nullptr, nullptr, from_elements_kind);
2884      Add<HStoreKeyed>(to_elements, key_constant, value, nullptr,
2885                       to_elements_kind);
2886    }
2887  } else {
2888    if (!pre_fill_with_holes &&
2889        (capacity == NULL || !length->Equals(capacity))) {
2890      BuildFillElementsWithHole(to_elements, to_elements_kind,
2891                                length, NULL);
2892    }
2893
2894    LoopBuilder builder(this, context(), LoopBuilder::kPostDecrement);
2895
2896    HValue* key = builder.BeginBody(length, graph()->GetConstant0(),
2897                                    Token::GT);
2898
2899    key = AddUncasted<HSub>(key, graph()->GetConstant1());
2900    key->ClearFlag(HValue::kCanOverflow);
2901
2902    HValue* element = Add<HLoadKeyed>(from_elements, key, nullptr, nullptr,
2903                                      from_elements_kind, ALLOW_RETURN_HOLE);
2904
2905    ElementsKind kind = (IsHoleyElementsKind(from_elements_kind) &&
2906                         IsFastSmiElementsKind(to_elements_kind))
2907      ? FAST_HOLEY_ELEMENTS : to_elements_kind;
2908
2909    if (IsHoleyElementsKind(from_elements_kind) &&
2910        from_elements_kind != to_elements_kind) {
2911      IfBuilder if_hole(this);
2912      if_hole.If<HCompareHoleAndBranch>(element);
2913      if_hole.Then();
2914      HConstant* hole_constant = IsFastDoubleElementsKind(to_elements_kind)
2915                                     ? Add<HConstant>(HConstant::kHoleNaN)
2916                                     : graph()->GetConstantHole();
2917      Add<HStoreKeyed>(to_elements, key, hole_constant, nullptr, kind);
2918      if_hole.Else();
2919      HStoreKeyed* store =
2920          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
2921      store->SetFlag(HValue::kTruncatingToNumber);
2922      if_hole.End();
2923    } else {
2924      HStoreKeyed* store =
2925          Add<HStoreKeyed>(to_elements, key, element, nullptr, kind);
2926      store->SetFlag(HValue::kTruncatingToNumber);
2927    }
2928
2929    builder.EndBody();
2930  }
2931
2932  Counters* counters = isolate()->counters();
2933  AddIncrementCounter(counters->inlined_copied_elements());
2934}
2935
2936void HGraphBuilder::BuildCreateAllocationMemento(
2937    HValue* previous_object,
2938    HValue* previous_object_size,
2939    HValue* allocation_site) {
2940  DCHECK(allocation_site != NULL);
2941  HInnerAllocatedObject* allocation_memento = Add<HInnerAllocatedObject>(
2942      previous_object, previous_object_size, HType::HeapObject());
2943  AddStoreMapConstant(
2944      allocation_memento, isolate()->factory()->allocation_memento_map());
2945  Add<HStoreNamedField>(
2946      allocation_memento,
2947      HObjectAccess::ForAllocationMementoSite(),
2948      allocation_site);
2949  if (FLAG_allocation_site_pretenuring) {
2950    HValue* memento_create_count =
2951        Add<HLoadNamedField>(allocation_site, nullptr,
2952                             HObjectAccess::ForAllocationSiteOffset(
2953                                 AllocationSite::kPretenureCreateCountOffset));
2954    memento_create_count = AddUncasted<HAdd>(
2955        memento_create_count, graph()->GetConstant1());
2956    // This smi value is reset to zero after every gc, overflow isn't a problem
2957    // since the counter is bounded by the new space size.
2958    memento_create_count->ClearFlag(HValue::kCanOverflow);
2959    Add<HStoreNamedField>(
2960        allocation_site, HObjectAccess::ForAllocationSiteOffset(
2961            AllocationSite::kPretenureCreateCountOffset), memento_create_count);
2962  }
2963}
2964
2965
2966HInstruction* HGraphBuilder::BuildGetNativeContext() {
2967  return Add<HLoadNamedField>(
2968      context(), nullptr,
2969      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
2970}
2971
2972
2973HInstruction* HGraphBuilder::BuildGetNativeContext(HValue* closure) {
2974  // Get the global object, then the native context
2975  HInstruction* context = Add<HLoadNamedField>(
2976      closure, nullptr, HObjectAccess::ForFunctionContextPointer());
2977  return Add<HLoadNamedField>(
2978      context, nullptr,
2979      HObjectAccess::ForContextSlot(Context::NATIVE_CONTEXT_INDEX));
2980}
2981
2982
2983HValue* HGraphBuilder::BuildGetParentContext(HValue* depth, int depth_value) {
2984  HValue* script_context = context();
2985  if (depth != NULL) {
2986    HValue* zero = graph()->GetConstant0();
2987
2988    Push(script_context);
2989    Push(depth);
2990
2991    LoopBuilder loop(this);
2992    loop.BeginBody(2);  // Drop script_context and depth from last environment
2993                        // to appease live range building without simulates.
2994    depth = Pop();
2995    script_context = Pop();
2996
2997    script_context = Add<HLoadNamedField>(
2998        script_context, nullptr,
2999        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
3000    depth = AddUncasted<HSub>(depth, graph()->GetConstant1());
3001    depth->ClearFlag(HValue::kCanOverflow);
3002
3003    IfBuilder if_break(this);
3004    if_break.If<HCompareNumericAndBranch, HValue*>(depth, zero, Token::EQ);
3005    if_break.Then();
3006    {
3007      Push(script_context);  // The result.
3008      loop.Break();
3009    }
3010    if_break.Else();
3011    {
3012      Push(script_context);
3013      Push(depth);
3014    }
3015    loop.EndBody();
3016    if_break.End();
3017
3018    script_context = Pop();
3019  } else if (depth_value > 0) {
3020    // Unroll the above loop.
3021    for (int i = 0; i < depth_value; i++) {
3022      script_context = Add<HLoadNamedField>(
3023          script_context, nullptr,
3024          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
3025    }
3026  }
3027  return script_context;
3028}
3029
3030
3031HInstruction* HGraphBuilder::BuildGetArrayFunction() {
3032  HInstruction* native_context = BuildGetNativeContext();
3033  HInstruction* index =
3034      Add<HConstant>(static_cast<int32_t>(Context::ARRAY_FUNCTION_INDEX));
3035  return Add<HLoadKeyed>(native_context, index, nullptr, nullptr,
3036                         FAST_ELEMENTS);
3037}
3038
3039
3040HValue* HGraphBuilder::BuildArrayBufferViewFieldAccessor(HValue* object,
3041                                                         HValue* checked_object,
3042                                                         FieldIndex index) {
3043  NoObservableSideEffectsScope scope(this);
3044  HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(
3045      index.offset(), Representation::Tagged());
3046  HInstruction* buffer = Add<HLoadNamedField>(
3047      object, checked_object, HObjectAccess::ForJSArrayBufferViewBuffer());
3048  HInstruction* field = Add<HLoadNamedField>(object, checked_object, access);
3049
3050  HInstruction* flags = Add<HLoadNamedField>(
3051      buffer, nullptr, HObjectAccess::ForJSArrayBufferBitField());
3052  HValue* was_neutered_mask =
3053      Add<HConstant>(1 << JSArrayBuffer::WasNeutered::kShift);
3054  HValue* was_neutered_test =
3055      AddUncasted<HBitwise>(Token::BIT_AND, flags, was_neutered_mask);
3056
3057  IfBuilder if_was_neutered(this);
3058  if_was_neutered.If<HCompareNumericAndBranch>(
3059      was_neutered_test, graph()->GetConstant0(), Token::NE);
3060  if_was_neutered.Then();
3061  Push(graph()->GetConstant0());
3062  if_was_neutered.Else();
3063  Push(field);
3064  if_was_neutered.End();
3065
3066  return Pop();
3067}
3068
3069HValue* HGraphBuilder::AddLoadJSBuiltin(int context_index) {
3070  HValue* native_context = BuildGetNativeContext();
3071  HObjectAccess function_access = HObjectAccess::ForContextSlot(context_index);
3072  return Add<HLoadNamedField>(native_context, nullptr, function_access);
3073}
3074
3075HOptimizedGraphBuilder::HOptimizedGraphBuilder(CompilationInfo* info,
3076                                               bool track_positions)
3077    : HGraphBuilder(info, CallInterfaceDescriptor(), track_positions),
3078      function_state_(NULL),
3079      initial_function_state_(this, info, NORMAL_RETURN, -1,
3080                              TailCallMode::kAllow),
3081      ast_context_(NULL),
3082      break_scope_(NULL),
3083      inlined_count_(0),
3084      globals_(10, info->zone()),
3085      osr_(new (info->zone()) HOsrBuilder(this)),
3086      bounds_(info->zone()) {
3087  // This is not initialized in the initializer list because the
3088  // constructor for the initial state relies on function_state_ == NULL
3089  // to know it's the initial state.
3090  function_state_ = &initial_function_state_;
3091  InitializeAstVisitor(info->isolate());
3092}
3093
3094
3095HBasicBlock* HOptimizedGraphBuilder::CreateJoin(HBasicBlock* first,
3096                                                HBasicBlock* second,
3097                                                BailoutId join_id) {
3098  if (first == NULL) {
3099    return second;
3100  } else if (second == NULL) {
3101    return first;
3102  } else {
3103    HBasicBlock* join_block = graph()->CreateBasicBlock();
3104    Goto(first, join_block);
3105    Goto(second, join_block);
3106    join_block->SetJoinId(join_id);
3107    return join_block;
3108  }
3109}
3110
3111HBasicBlock* HOptimizedGraphBuilder::JoinContinue(IterationStatement* statement,
3112                                                  BailoutId continue_id,
3113                                                  HBasicBlock* exit_block,
3114                                                  HBasicBlock* continue_block) {
3115  if (continue_block != NULL) {
3116    if (exit_block != NULL) Goto(exit_block, continue_block);
3117    continue_block->SetJoinId(continue_id);
3118    return continue_block;
3119  }
3120  return exit_block;
3121}
3122
3123
3124HBasicBlock* HOptimizedGraphBuilder::CreateLoop(IterationStatement* statement,
3125                                                HBasicBlock* loop_entry,
3126                                                HBasicBlock* body_exit,
3127                                                HBasicBlock* loop_successor,
3128                                                HBasicBlock* break_block) {
3129  if (body_exit != NULL) Goto(body_exit, loop_entry);
3130  loop_entry->PostProcessLoopHeader(statement);
3131  if (break_block != NULL) {
3132    if (loop_successor != NULL) Goto(loop_successor, break_block);
3133    break_block->SetJoinId(statement->ExitId());
3134    return break_block;
3135  }
3136  return loop_successor;
3137}
3138
3139
3140// Build a new loop header block and set it as the current block.
3141HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry() {
3142  HBasicBlock* loop_entry = CreateLoopHeaderBlock();
3143  Goto(loop_entry);
3144  set_current_block(loop_entry);
3145  return loop_entry;
3146}
3147
3148
3149HBasicBlock* HOptimizedGraphBuilder::BuildLoopEntry(
3150    IterationStatement* statement) {
3151  HBasicBlock* loop_entry;
3152
3153  if (osr()->HasOsrEntryAt(statement)) {
3154    loop_entry = osr()->BuildOsrLoopEntry(statement);
3155    if (function_state()->IsInsideDoExpressionScope()) {
3156      Bailout(kDoExpressionUnmodelable);
3157    }
3158  } else {
3159    loop_entry = BuildLoopEntry();
3160  }
3161  return loop_entry;
3162}
3163
3164
3165void HBasicBlock::FinishExit(HControlInstruction* instruction,
3166                             SourcePosition position) {
3167  Finish(instruction, position);
3168  ClearEnvironment();
3169}
3170
3171
3172std::ostream& operator<<(std::ostream& os, const HBasicBlock& b) {
3173  return os << "B" << b.block_id();
3174}
3175
3176HGraph::HGraph(CompilationInfo* info, CallInterfaceDescriptor descriptor)
3177    : isolate_(info->isolate()),
3178      next_block_id_(0),
3179      entry_block_(NULL),
3180      blocks_(8, info->zone()),
3181      values_(16, info->zone()),
3182      phi_list_(NULL),
3183      uint32_instructions_(NULL),
3184      osr_(NULL),
3185      info_(info),
3186      descriptor_(descriptor),
3187      zone_(info->zone()),
3188      allow_code_motion_(false),
3189      use_optimistic_licm_(false),
3190      depends_on_empty_array_proto_elements_(false),
3191      depends_on_string_length_overflow_(false),
3192      type_change_checksum_(0),
3193      maximum_environment_size_(0),
3194      no_side_effects_scope_count_(0),
3195      disallow_adding_new_values_(false) {
3196  if (info->IsStub()) {
3197    // For stubs, explicitly add the context to the environment.
3198    start_environment_ =
3199        new (zone_) HEnvironment(zone_, descriptor.GetParameterCount() + 1);
3200  } else {
3201    start_environment_ =
3202        new(zone_) HEnvironment(NULL, info->scope(), info->closure(), zone_);
3203  }
3204  start_environment_->set_ast_id(BailoutId::FunctionContext());
3205  entry_block_ = CreateBasicBlock();
3206  entry_block_->SetInitialEnvironment(start_environment_);
3207}
3208
3209
3210HBasicBlock* HGraph::CreateBasicBlock() {
3211  HBasicBlock* result = new(zone()) HBasicBlock(this);
3212  blocks_.Add(result, zone());
3213  return result;
3214}
3215
3216
3217void HGraph::FinalizeUniqueness() {
3218  DisallowHeapAllocation no_gc;
3219  for (int i = 0; i < blocks()->length(); ++i) {
3220    for (HInstructionIterator it(blocks()->at(i)); !it.Done(); it.Advance()) {
3221      it.Current()->FinalizeUniqueness();
3222    }
3223  }
3224}
3225
3226
3227// Block ordering was implemented with two mutually recursive methods,
3228// HGraph::Postorder and HGraph::PostorderLoopBlocks.
3229// The recursion could lead to stack overflow so the algorithm has been
3230// implemented iteratively.
3231// At a high level the algorithm looks like this:
3232//
3233// Postorder(block, loop_header) : {
3234//   if (block has already been visited or is of another loop) return;
3235//   mark block as visited;
3236//   if (block is a loop header) {
3237//     VisitLoopMembers(block, loop_header);
3238//     VisitSuccessorsOfLoopHeader(block);
3239//   } else {
3240//     VisitSuccessors(block)
3241//   }
3242//   put block in result list;
3243// }
3244//
3245// VisitLoopMembers(block, outer_loop_header) {
3246//   foreach (block b in block loop members) {
3247//     VisitSuccessorsOfLoopMember(b, outer_loop_header);
3248//     if (b is loop header) VisitLoopMembers(b);
3249//   }
3250// }
3251//
3252// VisitSuccessorsOfLoopMember(block, outer_loop_header) {
3253//   foreach (block b in block successors) Postorder(b, outer_loop_header)
3254// }
3255//
3256// VisitSuccessorsOfLoopHeader(block) {
3257//   foreach (block b in block successors) Postorder(b, block)
3258// }
3259//
3260// VisitSuccessors(block, loop_header) {
3261//   foreach (block b in block successors) Postorder(b, loop_header)
3262// }
3263//
3264// The ordering is started calling Postorder(entry, NULL).
3265//
3266// Each instance of PostorderProcessor represents the "stack frame" of the
3267// recursion, and particularly keeps the state of the loop (iteration) of the
3268// "Visit..." function it represents.
3269// To recycle memory we keep all the frames in a double linked list but
3270// this means that we cannot use constructors to initialize the frames.
3271//
3272class PostorderProcessor : public ZoneObject {
3273 public:
3274  // Back link (towards the stack bottom).
3275  PostorderProcessor* parent() {return father_; }
3276  // Forward link (towards the stack top).
3277  PostorderProcessor* child() {return child_; }
3278  HBasicBlock* block() { return block_; }
3279  HLoopInformation* loop() { return loop_; }
3280  HBasicBlock* loop_header() { return loop_header_; }
3281
3282  static PostorderProcessor* CreateEntryProcessor(Zone* zone,
3283                                                  HBasicBlock* block) {
3284    PostorderProcessor* result = new(zone) PostorderProcessor(NULL);
3285    return result->SetupSuccessors(zone, block, NULL);
3286  }
3287
3288  PostorderProcessor* PerformStep(Zone* zone,
3289                                  ZoneList<HBasicBlock*>* order) {
3290    PostorderProcessor* next =
3291        PerformNonBacktrackingStep(zone, order);
3292    if (next != NULL) {
3293      return next;
3294    } else {
3295      return Backtrack(zone, order);
3296    }
3297  }
3298
3299 private:
3300  explicit PostorderProcessor(PostorderProcessor* father)
3301      : father_(father), child_(NULL), successor_iterator(NULL) { }
3302
3303  // Each enum value states the cycle whose state is kept by this instance.
3304  enum LoopKind {
3305    NONE,
3306    SUCCESSORS,
3307    SUCCESSORS_OF_LOOP_HEADER,
3308    LOOP_MEMBERS,
3309    SUCCESSORS_OF_LOOP_MEMBER
3310  };
3311
3312  // Each "Setup..." method is like a constructor for a cycle state.
3313  PostorderProcessor* SetupSuccessors(Zone* zone,
3314                                      HBasicBlock* block,
3315                                      HBasicBlock* loop_header) {
3316    if (block == NULL || block->IsOrdered() ||
3317        block->parent_loop_header() != loop_header) {
3318      kind_ = NONE;
3319      block_ = NULL;
3320      loop_ = NULL;
3321      loop_header_ = NULL;
3322      return this;
3323    } else {
3324      block_ = block;
3325      loop_ = NULL;
3326      block->MarkAsOrdered();
3327
3328      if (block->IsLoopHeader()) {
3329        kind_ = SUCCESSORS_OF_LOOP_HEADER;
3330        loop_header_ = block;
3331        InitializeSuccessors();
3332        PostorderProcessor* result = Push(zone);
3333        return result->SetupLoopMembers(zone, block, block->loop_information(),
3334                                        loop_header);
3335      } else {
3336        DCHECK(block->IsFinished());
3337        kind_ = SUCCESSORS;
3338        loop_header_ = loop_header;
3339        InitializeSuccessors();
3340        return this;
3341      }
3342    }
3343  }
3344
3345  PostorderProcessor* SetupLoopMembers(Zone* zone,
3346                                       HBasicBlock* block,
3347                                       HLoopInformation* loop,
3348                                       HBasicBlock* loop_header) {
3349    kind_ = LOOP_MEMBERS;
3350    block_ = block;
3351    loop_ = loop;
3352    loop_header_ = loop_header;
3353    InitializeLoopMembers();
3354    return this;
3355  }
3356
3357  PostorderProcessor* SetupSuccessorsOfLoopMember(
3358      HBasicBlock* block,
3359      HLoopInformation* loop,
3360      HBasicBlock* loop_header) {
3361    kind_ = SUCCESSORS_OF_LOOP_MEMBER;
3362    block_ = block;
3363    loop_ = loop;
3364    loop_header_ = loop_header;
3365    InitializeSuccessors();
3366    return this;
3367  }
3368
3369  // This method "allocates" a new stack frame.
3370  PostorderProcessor* Push(Zone* zone) {
3371    if (child_ == NULL) {
3372      child_ = new(zone) PostorderProcessor(this);
3373    }
3374    return child_;
3375  }
3376
3377  void ClosePostorder(ZoneList<HBasicBlock*>* order, Zone* zone) {
3378    DCHECK(block_->end()->FirstSuccessor() == NULL ||
3379           order->Contains(block_->end()->FirstSuccessor()) ||
3380           block_->end()->FirstSuccessor()->IsLoopHeader());
3381    DCHECK(block_->end()->SecondSuccessor() == NULL ||
3382           order->Contains(block_->end()->SecondSuccessor()) ||
3383           block_->end()->SecondSuccessor()->IsLoopHeader());
3384    order->Add(block_, zone);
3385  }
3386
3387  // This method is the basic block to walk up the stack.
3388  PostorderProcessor* Pop(Zone* zone,
3389                          ZoneList<HBasicBlock*>* order) {
3390    switch (kind_) {
3391      case SUCCESSORS:
3392      case SUCCESSORS_OF_LOOP_HEADER:
3393        ClosePostorder(order, zone);
3394        return father_;
3395      case LOOP_MEMBERS:
3396        return father_;
3397      case SUCCESSORS_OF_LOOP_MEMBER:
3398        if (block()->IsLoopHeader() && block() != loop_->loop_header()) {
3399          // In this case we need to perform a LOOP_MEMBERS cycle so we
3400          // initialize it and return this instead of father.
3401          return SetupLoopMembers(zone, block(),
3402                                  block()->loop_information(), loop_header_);
3403        } else {
3404          return father_;
3405        }
3406      case NONE:
3407        return father_;
3408    }
3409    UNREACHABLE();
3410    return NULL;
3411  }
3412
3413  // Walks up the stack.
3414  PostorderProcessor* Backtrack(Zone* zone,
3415                                ZoneList<HBasicBlock*>* order) {
3416    PostorderProcessor* parent = Pop(zone, order);
3417    while (parent != NULL) {
3418      PostorderProcessor* next =
3419          parent->PerformNonBacktrackingStep(zone, order);
3420      if (next != NULL) {
3421        return next;
3422      } else {
3423        parent = parent->Pop(zone, order);
3424      }
3425    }
3426    return NULL;
3427  }
3428
3429  PostorderProcessor* PerformNonBacktrackingStep(
3430      Zone* zone,
3431      ZoneList<HBasicBlock*>* order) {
3432    HBasicBlock* next_block;
3433    switch (kind_) {
3434      case SUCCESSORS:
3435        next_block = AdvanceSuccessors();
3436        if (next_block != NULL) {
3437          PostorderProcessor* result = Push(zone);
3438          return result->SetupSuccessors(zone, next_block, loop_header_);
3439        }
3440        break;
3441      case SUCCESSORS_OF_LOOP_HEADER:
3442        next_block = AdvanceSuccessors();
3443        if (next_block != NULL) {
3444          PostorderProcessor* result = Push(zone);
3445          return result->SetupSuccessors(zone, next_block, block());
3446        }
3447        break;
3448      case LOOP_MEMBERS:
3449        next_block = AdvanceLoopMembers();
3450        if (next_block != NULL) {
3451          PostorderProcessor* result = Push(zone);
3452          return result->SetupSuccessorsOfLoopMember(next_block,
3453                                                     loop_, loop_header_);
3454        }
3455        break;
3456      case SUCCESSORS_OF_LOOP_MEMBER:
3457        next_block = AdvanceSuccessors();
3458        if (next_block != NULL) {
3459          PostorderProcessor* result = Push(zone);
3460          return result->SetupSuccessors(zone, next_block, loop_header_);
3461        }
3462        break;
3463      case NONE:
3464        return NULL;
3465    }
3466    return NULL;
3467  }
3468
3469  // The following two methods implement a "foreach b in successors" cycle.
3470  void InitializeSuccessors() {
3471    loop_index = 0;
3472    loop_length = 0;
3473    successor_iterator = HSuccessorIterator(block_->end());
3474  }
3475
3476  HBasicBlock* AdvanceSuccessors() {
3477    if (!successor_iterator.Done()) {
3478      HBasicBlock* result = successor_iterator.Current();
3479      successor_iterator.Advance();
3480      return result;
3481    }
3482    return NULL;
3483  }
3484
3485  // The following two methods implement a "foreach b in loop members" cycle.
3486  void InitializeLoopMembers() {
3487    loop_index = 0;
3488    loop_length = loop_->blocks()->length();
3489  }
3490
3491  HBasicBlock* AdvanceLoopMembers() {
3492    if (loop_index < loop_length) {
3493      HBasicBlock* result = loop_->blocks()->at(loop_index);
3494      loop_index++;
3495      return result;
3496    } else {
3497      return NULL;
3498    }
3499  }
3500
3501  LoopKind kind_;
3502  PostorderProcessor* father_;
3503  PostorderProcessor* child_;
3504  HLoopInformation* loop_;
3505  HBasicBlock* block_;
3506  HBasicBlock* loop_header_;
3507  int loop_index;
3508  int loop_length;
3509  HSuccessorIterator successor_iterator;
3510};
3511
3512
3513void HGraph::OrderBlocks() {
3514  CompilationPhase phase("H_Block ordering", info());
3515
3516#ifdef DEBUG
3517  // Initially the blocks must not be ordered.
3518  for (int i = 0; i < blocks_.length(); ++i) {
3519    DCHECK(!blocks_[i]->IsOrdered());
3520  }
3521#endif
3522
3523  PostorderProcessor* postorder =
3524      PostorderProcessor::CreateEntryProcessor(zone(), blocks_[0]);
3525  blocks_.Rewind(0);
3526  while (postorder) {
3527    postorder = postorder->PerformStep(zone(), &blocks_);
3528  }
3529
3530#ifdef DEBUG
3531  // Now all blocks must be marked as ordered.
3532  for (int i = 0; i < blocks_.length(); ++i) {
3533    DCHECK(blocks_[i]->IsOrdered());
3534  }
3535#endif
3536
3537  // Reverse block list and assign block IDs.
3538  for (int i = 0, j = blocks_.length(); --j >= i; ++i) {
3539    HBasicBlock* bi = blocks_[i];
3540    HBasicBlock* bj = blocks_[j];
3541    bi->set_block_id(j);
3542    bj->set_block_id(i);
3543    blocks_[i] = bj;
3544    blocks_[j] = bi;
3545  }
3546}
3547
3548
3549void HGraph::AssignDominators() {
3550  HPhase phase("H_Assign dominators", this);
3551  for (int i = 0; i < blocks_.length(); ++i) {
3552    HBasicBlock* block = blocks_[i];
3553    if (block->IsLoopHeader()) {
3554      // Only the first predecessor of a loop header is from outside the loop.
3555      // All others are back edges, and thus cannot dominate the loop header.
3556      block->AssignCommonDominator(block->predecessors()->first());
3557      block->AssignLoopSuccessorDominators();
3558    } else {
3559      for (int j = blocks_[i]->predecessors()->length() - 1; j >= 0; --j) {
3560        blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->at(j));
3561      }
3562    }
3563  }
3564}
3565
3566
3567bool HGraph::CheckArgumentsPhiUses() {
3568  int block_count = blocks_.length();
3569  for (int i = 0; i < block_count; ++i) {
3570    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3571      HPhi* phi = blocks_[i]->phis()->at(j);
3572      // We don't support phi uses of arguments for now.
3573      if (phi->CheckFlag(HValue::kIsArguments)) return false;
3574    }
3575  }
3576  return true;
3577}
3578
3579
3580bool HGraph::CheckConstPhiUses() {
3581  int block_count = blocks_.length();
3582  for (int i = 0; i < block_count; ++i) {
3583    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3584      HPhi* phi = blocks_[i]->phis()->at(j);
3585      // Check for the hole value (from an uninitialized const).
3586      for (int k = 0; k < phi->OperandCount(); k++) {
3587        if (phi->OperandAt(k) == GetConstantHole()) return false;
3588      }
3589    }
3590  }
3591  return true;
3592}
3593
3594
3595void HGraph::CollectPhis() {
3596  int block_count = blocks_.length();
3597  phi_list_ = new(zone()) ZoneList<HPhi*>(block_count, zone());
3598  for (int i = 0; i < block_count; ++i) {
3599    for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
3600      HPhi* phi = blocks_[i]->phis()->at(j);
3601      phi_list_->Add(phi, zone());
3602    }
3603  }
3604}
3605
3606
3607// Implementation of utility class to encapsulate the translation state for
3608// a (possibly inlined) function.
3609FunctionState::FunctionState(HOptimizedGraphBuilder* owner,
3610                             CompilationInfo* info, InliningKind inlining_kind,
3611                             int inlining_id, TailCallMode tail_call_mode)
3612    : owner_(owner),
3613      compilation_info_(info),
3614      call_context_(NULL),
3615      inlining_kind_(inlining_kind),
3616      tail_call_mode_(tail_call_mode),
3617      function_return_(NULL),
3618      test_context_(NULL),
3619      entry_(NULL),
3620      arguments_object_(NULL),
3621      arguments_elements_(NULL),
3622      inlining_id_(inlining_id),
3623      outer_source_position_(SourcePosition::Unknown()),
3624      do_expression_scope_count_(0),
3625      outer_(owner->function_state()) {
3626  if (outer_ != NULL) {
3627    // State for an inline function.
3628    if (owner->ast_context()->IsTest()) {
3629      HBasicBlock* if_true = owner->graph()->CreateBasicBlock();
3630      HBasicBlock* if_false = owner->graph()->CreateBasicBlock();
3631      if_true->MarkAsInlineReturnTarget(owner->current_block());
3632      if_false->MarkAsInlineReturnTarget(owner->current_block());
3633      TestContext* outer_test_context = TestContext::cast(owner->ast_context());
3634      Expression* cond = outer_test_context->condition();
3635      // The AstContext constructor pushed on the context stack.  This newed
3636      // instance is the reason that AstContext can't be BASE_EMBEDDED.
3637      test_context_ = new TestContext(owner, cond, if_true, if_false);
3638    } else {
3639      function_return_ = owner->graph()->CreateBasicBlock();
3640      function_return()->MarkAsInlineReturnTarget(owner->current_block());
3641    }
3642    // Set this after possibly allocating a new TestContext above.
3643    call_context_ = owner->ast_context();
3644  }
3645
3646  // Push on the state stack.
3647  owner->set_function_state(this);
3648
3649  if (owner->is_tracking_positions()) {
3650    outer_source_position_ = owner->source_position();
3651    owner->EnterInlinedSource(inlining_id);
3652    owner->SetSourcePosition(info->shared_info()->start_position());
3653  }
3654}
3655
3656
3657FunctionState::~FunctionState() {
3658  delete test_context_;
3659  owner_->set_function_state(outer_);
3660
3661  if (owner_->is_tracking_positions()) {
3662    owner_->set_source_position(outer_source_position_);
3663    owner_->EnterInlinedSource(outer_->inlining_id());
3664  }
3665}
3666
3667
3668// Implementation of utility classes to represent an expression's context in
3669// the AST.
3670AstContext::AstContext(HOptimizedGraphBuilder* owner, Expression::Context kind)
3671    : owner_(owner),
3672      kind_(kind),
3673      outer_(owner->ast_context()),
3674      typeof_mode_(NOT_INSIDE_TYPEOF) {
3675  owner->set_ast_context(this);  // Push.
3676#ifdef DEBUG
3677  DCHECK_EQ(JS_FUNCTION, owner->environment()->frame_type());
3678  original_length_ = owner->environment()->length();
3679#endif
3680}
3681
3682
3683AstContext::~AstContext() {
3684  owner_->set_ast_context(outer_);  // Pop.
3685}
3686
3687
3688EffectContext::~EffectContext() {
3689  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
3690         (owner()->environment()->length() == original_length_ &&
3691          (owner()->environment()->frame_type() == JS_FUNCTION ||
3692           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
3693}
3694
3695
3696ValueContext::~ValueContext() {
3697  DCHECK(owner()->HasStackOverflow() || owner()->current_block() == NULL ||
3698         (owner()->environment()->length() == original_length_ + 1 &&
3699          (owner()->environment()->frame_type() == JS_FUNCTION ||
3700           owner()->environment()->frame_type() == TAIL_CALLER_FUNCTION)));
3701}
3702
3703
3704void EffectContext::ReturnValue(HValue* value) {
3705  // The value is simply ignored.
3706}
3707
3708
3709void ValueContext::ReturnValue(HValue* value) {
3710  // The value is tracked in the bailout environment, and communicated
3711  // through the environment as the result of the expression.
3712  if (value->CheckFlag(HValue::kIsArguments)) {
3713    if (flag_ == ARGUMENTS_FAKED) {
3714      value = owner()->graph()->GetConstantUndefined();
3715    } else if (!arguments_allowed()) {
3716      owner()->Bailout(kBadValueContextForArgumentsValue);
3717    }
3718  }
3719  owner()->Push(value);
3720}
3721
3722
3723void TestContext::ReturnValue(HValue* value) {
3724  BuildBranch(value);
3725}
3726
3727
3728void EffectContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3729  DCHECK(!instr->IsControlInstruction());
3730  owner()->AddInstruction(instr);
3731  if (instr->HasObservableSideEffects()) {
3732    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3733  }
3734}
3735
3736
3737void EffectContext::ReturnControl(HControlInstruction* instr,
3738                                  BailoutId ast_id) {
3739  DCHECK(!instr->HasObservableSideEffects());
3740  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3741  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3742  instr->SetSuccessorAt(0, empty_true);
3743  instr->SetSuccessorAt(1, empty_false);
3744  owner()->FinishCurrentBlock(instr);
3745  HBasicBlock* join = owner()->CreateJoin(empty_true, empty_false, ast_id);
3746  owner()->set_current_block(join);
3747}
3748
3749
3750void EffectContext::ReturnContinuation(HIfContinuation* continuation,
3751                                       BailoutId ast_id) {
3752  HBasicBlock* true_branch = NULL;
3753  HBasicBlock* false_branch = NULL;
3754  continuation->Continue(&true_branch, &false_branch);
3755  if (!continuation->IsTrueReachable()) {
3756    owner()->set_current_block(false_branch);
3757  } else if (!continuation->IsFalseReachable()) {
3758    owner()->set_current_block(true_branch);
3759  } else {
3760    HBasicBlock* join = owner()->CreateJoin(true_branch, false_branch, ast_id);
3761    owner()->set_current_block(join);
3762  }
3763}
3764
3765
3766void ValueContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3767  DCHECK(!instr->IsControlInstruction());
3768  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3769    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3770  }
3771  owner()->AddInstruction(instr);
3772  owner()->Push(instr);
3773  if (instr->HasObservableSideEffects()) {
3774    owner()->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3775  }
3776}
3777
3778
3779void ValueContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3780  DCHECK(!instr->HasObservableSideEffects());
3781  if (!arguments_allowed() && instr->CheckFlag(HValue::kIsArguments)) {
3782    return owner()->Bailout(kBadValueContextForArgumentsObjectValue);
3783  }
3784  HBasicBlock* materialize_false = owner()->graph()->CreateBasicBlock();
3785  HBasicBlock* materialize_true = owner()->graph()->CreateBasicBlock();
3786  instr->SetSuccessorAt(0, materialize_true);
3787  instr->SetSuccessorAt(1, materialize_false);
3788  owner()->FinishCurrentBlock(instr);
3789  owner()->set_current_block(materialize_true);
3790  owner()->Push(owner()->graph()->GetConstantTrue());
3791  owner()->set_current_block(materialize_false);
3792  owner()->Push(owner()->graph()->GetConstantFalse());
3793  HBasicBlock* join =
3794    owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3795  owner()->set_current_block(join);
3796}
3797
3798
3799void ValueContext::ReturnContinuation(HIfContinuation* continuation,
3800                                      BailoutId ast_id) {
3801  HBasicBlock* materialize_true = NULL;
3802  HBasicBlock* materialize_false = NULL;
3803  continuation->Continue(&materialize_true, &materialize_false);
3804  if (continuation->IsTrueReachable()) {
3805    owner()->set_current_block(materialize_true);
3806    owner()->Push(owner()->graph()->GetConstantTrue());
3807    owner()->set_current_block(materialize_true);
3808  }
3809  if (continuation->IsFalseReachable()) {
3810    owner()->set_current_block(materialize_false);
3811    owner()->Push(owner()->graph()->GetConstantFalse());
3812    owner()->set_current_block(materialize_false);
3813  }
3814  if (continuation->TrueAndFalseReachable()) {
3815    HBasicBlock* join =
3816        owner()->CreateJoin(materialize_true, materialize_false, ast_id);
3817    owner()->set_current_block(join);
3818  }
3819}
3820
3821
3822void TestContext::ReturnInstruction(HInstruction* instr, BailoutId ast_id) {
3823  DCHECK(!instr->IsControlInstruction());
3824  HOptimizedGraphBuilder* builder = owner();
3825  builder->AddInstruction(instr);
3826  // We expect a simulate after every expression with side effects, though
3827  // this one isn't actually needed (and wouldn't work if it were targeted).
3828  if (instr->HasObservableSideEffects()) {
3829    builder->Push(instr);
3830    builder->Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
3831    builder->Pop();
3832  }
3833  BuildBranch(instr);
3834}
3835
3836
3837void TestContext::ReturnControl(HControlInstruction* instr, BailoutId ast_id) {
3838  DCHECK(!instr->HasObservableSideEffects());
3839  HBasicBlock* empty_true = owner()->graph()->CreateBasicBlock();
3840  HBasicBlock* empty_false = owner()->graph()->CreateBasicBlock();
3841  instr->SetSuccessorAt(0, empty_true);
3842  instr->SetSuccessorAt(1, empty_false);
3843  owner()->FinishCurrentBlock(instr);
3844  owner()->Goto(empty_true, if_true(), owner()->function_state());
3845  owner()->Goto(empty_false, if_false(), owner()->function_state());
3846  owner()->set_current_block(NULL);
3847}
3848
3849
3850void TestContext::ReturnContinuation(HIfContinuation* continuation,
3851                                     BailoutId ast_id) {
3852  HBasicBlock* true_branch = NULL;
3853  HBasicBlock* false_branch = NULL;
3854  continuation->Continue(&true_branch, &false_branch);
3855  if (continuation->IsTrueReachable()) {
3856    owner()->Goto(true_branch, if_true(), owner()->function_state());
3857  }
3858  if (continuation->IsFalseReachable()) {
3859    owner()->Goto(false_branch, if_false(), owner()->function_state());
3860  }
3861  owner()->set_current_block(NULL);
3862}
3863
3864
3865void TestContext::BuildBranch(HValue* value) {
3866  // We expect the graph to be in edge-split form: there is no edge that
3867  // connects a branch node to a join node.  We conservatively ensure that
3868  // property by always adding an empty block on the outgoing edges of this
3869  // branch.
3870  HOptimizedGraphBuilder* builder = owner();
3871  if (value != NULL && value->CheckFlag(HValue::kIsArguments)) {
3872    builder->Bailout(kArgumentsObjectValueInATestContext);
3873  }
3874  ToBooleanHints expected(condition()->to_boolean_types());
3875  ReturnControl(owner()->New<HBranch>(value, expected), BailoutId::None());
3876}
3877
3878
3879// HOptimizedGraphBuilder infrastructure for bailing out and checking bailouts.
3880#define CHECK_BAILOUT(call)                     \
3881  do {                                          \
3882    call;                                       \
3883    if (HasStackOverflow()) return;             \
3884  } while (false)
3885
3886
3887#define CHECK_ALIVE(call)                                       \
3888  do {                                                          \
3889    call;                                                       \
3890    if (HasStackOverflow() || current_block() == NULL) return;  \
3891  } while (false)
3892
3893
3894#define CHECK_ALIVE_OR_RETURN(call, value)                            \
3895  do {                                                                \
3896    call;                                                             \
3897    if (HasStackOverflow() || current_block() == NULL) return value;  \
3898  } while (false)
3899
3900
3901void HOptimizedGraphBuilder::Bailout(BailoutReason reason) {
3902  current_info()->AbortOptimization(reason);
3903  SetStackOverflow();
3904}
3905
3906
3907void HOptimizedGraphBuilder::VisitForEffect(Expression* expr) {
3908  EffectContext for_effect(this);
3909  Visit(expr);
3910}
3911
3912
3913void HOptimizedGraphBuilder::VisitForValue(Expression* expr,
3914                                           ArgumentsAllowedFlag flag) {
3915  ValueContext for_value(this, flag);
3916  Visit(expr);
3917}
3918
3919
3920void HOptimizedGraphBuilder::VisitForTypeOf(Expression* expr) {
3921  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
3922  for_value.set_typeof_mode(INSIDE_TYPEOF);
3923  Visit(expr);
3924}
3925
3926
3927void HOptimizedGraphBuilder::VisitForControl(Expression* expr,
3928                                             HBasicBlock* true_block,
3929                                             HBasicBlock* false_block) {
3930  TestContext for_control(this, expr, true_block, false_block);
3931  Visit(expr);
3932}
3933
3934
3935void HOptimizedGraphBuilder::VisitExpressions(
3936    ZoneList<Expression*>* exprs) {
3937  for (int i = 0; i < exprs->length(); ++i) {
3938    CHECK_ALIVE(VisitForValue(exprs->at(i)));
3939  }
3940}
3941
3942
3943void HOptimizedGraphBuilder::VisitExpressions(ZoneList<Expression*>* exprs,
3944                                              ArgumentsAllowedFlag flag) {
3945  for (int i = 0; i < exprs->length(); ++i) {
3946    CHECK_ALIVE(VisitForValue(exprs->at(i), flag));
3947  }
3948}
3949
3950
3951bool HOptimizedGraphBuilder::BuildGraph() {
3952  if (IsDerivedConstructor(current_info()->literal()->kind())) {
3953    Bailout(kSuperReference);
3954    return false;
3955  }
3956
3957  DeclarationScope* scope = current_info()->scope();
3958  SetUpScope(scope);
3959
3960  // Add an edge to the body entry.  This is warty: the graph's start
3961  // environment will be used by the Lithium translation as the initial
3962  // environment on graph entry, but it has now been mutated by the
3963  // Hydrogen translation of the instructions in the start block.  This
3964  // environment uses values which have not been defined yet.  These
3965  // Hydrogen instructions will then be replayed by the Lithium
3966  // translation, so they cannot have an environment effect.  The edge to
3967  // the body's entry block (along with some special logic for the start
3968  // block in HInstruction::InsertAfter) seals the start block from
3969  // getting unwanted instructions inserted.
3970  //
3971  // TODO(kmillikin): Fix this.  Stop mutating the initial environment.
3972  // Make the Hydrogen instructions in the initial block into Hydrogen
3973  // values (but not instructions), present in the initial environment and
3974  // not replayed by the Lithium translation.
3975  HEnvironment* initial_env = environment()->CopyWithoutHistory();
3976  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
3977  Goto(body_entry);
3978  body_entry->SetJoinId(BailoutId::FunctionEntry());
3979  set_current_block(body_entry);
3980
3981  VisitDeclarations(scope->declarations());
3982  Add<HSimulate>(BailoutId::Declarations());
3983
3984  Add<HStackCheck>(HStackCheck::kFunctionEntry);
3985
3986  VisitStatements(current_info()->literal()->body());
3987  if (HasStackOverflow()) return false;
3988
3989  if (current_block() != NULL) {
3990    Add<HReturn>(graph()->GetConstantUndefined());
3991    set_current_block(NULL);
3992  }
3993
3994  // If the checksum of the number of type info changes is the same as the
3995  // last time this function was compiled, then this recompile is likely not
3996  // due to missing/inadequate type feedback, but rather too aggressive
3997  // optimization. Disable optimistic LICM in that case.
3998  Handle<Code> unoptimized_code(current_info()->shared_info()->code());
3999  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
4000  Handle<TypeFeedbackInfo> type_info(
4001      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
4002  int checksum = type_info->own_type_change_checksum();
4003  int composite_checksum = graph()->update_type_change_checksum(checksum);
4004  graph()->set_use_optimistic_licm(
4005      !type_info->matches_inlined_type_change_checksum(composite_checksum));
4006  type_info->set_inlined_type_change_checksum(composite_checksum);
4007
4008  // Set this predicate early to avoid handle deref during graph optimization.
4009  graph()->set_allow_code_motion(
4010      current_info()->IsStub() ||
4011      current_info()->shared_info()->opt_count() + 1 < FLAG_max_opt_count);
4012
4013  // Perform any necessary OSR-specific cleanups or changes to the graph.
4014  osr()->FinishGraph();
4015
4016  return true;
4017}
4018
4019
4020bool HGraph::Optimize(BailoutReason* bailout_reason) {
4021  OrderBlocks();
4022  AssignDominators();
4023
4024  // We need to create a HConstant "zero" now so that GVN will fold every
4025  // zero-valued constant in the graph together.
4026  // The constant is needed to make idef-based bounds check work: the pass
4027  // evaluates relations with "zero" and that zero cannot be created after GVN.
4028  GetConstant0();
4029
4030#ifdef DEBUG
4031  // Do a full verify after building the graph and computing dominators.
4032  Verify(true);
4033#endif
4034
4035  if (FLAG_analyze_environment_liveness && maximum_environment_size() != 0) {
4036    Run<HEnvironmentLivenessAnalysisPhase>();
4037  }
4038
4039  if (!CheckConstPhiUses()) {
4040    *bailout_reason = kUnsupportedPhiUseOfConstVariable;
4041    return false;
4042  }
4043  Run<HRedundantPhiEliminationPhase>();
4044  if (!CheckArgumentsPhiUses()) {
4045    *bailout_reason = kUnsupportedPhiUseOfArguments;
4046    return false;
4047  }
4048
4049  // Find and mark unreachable code to simplify optimizations, especially gvn,
4050  // where unreachable code could unnecessarily defeat LICM.
4051  Run<HMarkUnreachableBlocksPhase>();
4052
4053  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4054  if (FLAG_use_escape_analysis) Run<HEscapeAnalysisPhase>();
4055
4056  if (FLAG_load_elimination) Run<HLoadEliminationPhase>();
4057
4058  CollectPhis();
4059
4060  if (has_osr()) osr()->FinishOsrValues();
4061
4062  Run<HInferRepresentationPhase>();
4063
4064  // Remove HSimulate instructions that have turned out not to be needed
4065  // after all by folding them into the following HSimulate.
4066  // This must happen after inferring representations.
4067  Run<HMergeRemovableSimulatesPhase>();
4068
4069  Run<HRepresentationChangesPhase>();
4070
4071  Run<HInferTypesPhase>();
4072
4073  // Must be performed before canonicalization to ensure that Canonicalize
4074  // will not remove semantically meaningful ToInt32 operations e.g. BIT_OR with
4075  // zero.
4076  Run<HUint32AnalysisPhase>();
4077
4078  if (FLAG_use_canonicalizing) Run<HCanonicalizePhase>();
4079
4080  if (FLAG_use_gvn) Run<HGlobalValueNumberingPhase>();
4081
4082  if (FLAG_check_elimination) Run<HCheckEliminationPhase>();
4083
4084  if (FLAG_store_elimination) Run<HStoreEliminationPhase>();
4085
4086  Run<HRangeAnalysisPhase>();
4087
4088  // Eliminate redundant stack checks on backwards branches.
4089  Run<HStackCheckEliminationPhase>();
4090
4091  if (FLAG_array_bounds_checks_elimination) Run<HBoundsCheckEliminationPhase>();
4092  if (FLAG_array_index_dehoisting) Run<HDehoistIndexComputationsPhase>();
4093  if (FLAG_dead_code_elimination) Run<HDeadCodeEliminationPhase>();
4094
4095  RestoreActualValues();
4096
4097  // Find unreachable code a second time, GVN and other optimizations may have
4098  // made blocks unreachable that were previously reachable.
4099  Run<HMarkUnreachableBlocksPhase>();
4100
4101  return true;
4102}
4103
4104
4105void HGraph::RestoreActualValues() {
4106  HPhase phase("H_Restore actual values", this);
4107
4108  for (int block_index = 0; block_index < blocks()->length(); block_index++) {
4109    HBasicBlock* block = blocks()->at(block_index);
4110
4111#ifdef DEBUG
4112    for (int i = 0; i < block->phis()->length(); i++) {
4113      HPhi* phi = block->phis()->at(i);
4114      DCHECK(phi->ActualValue() == phi);
4115    }
4116#endif
4117
4118    for (HInstructionIterator it(block); !it.Done(); it.Advance()) {
4119      HInstruction* instruction = it.Current();
4120      if (instruction->ActualValue() == instruction) continue;
4121      if (instruction->CheckFlag(HValue::kIsDead)) {
4122        // The instruction was marked as deleted but left in the graph
4123        // as a control flow dependency point for subsequent
4124        // instructions.
4125        instruction->DeleteAndReplaceWith(instruction->ActualValue());
4126      } else {
4127        DCHECK(instruction->IsInformativeDefinition());
4128        if (instruction->IsPurelyInformativeDefinition()) {
4129          instruction->DeleteAndReplaceWith(instruction->RedefinedOperand());
4130        } else {
4131          instruction->ReplaceAllUsesWith(instruction->ActualValue());
4132        }
4133      }
4134    }
4135  }
4136}
4137
4138
4139void HOptimizedGraphBuilder::PushArgumentsFromEnvironment(int count) {
4140  ZoneList<HValue*> arguments(count, zone());
4141  for (int i = 0; i < count; ++i) {
4142    arguments.Add(Pop(), zone());
4143  }
4144
4145  HPushArguments* push_args = New<HPushArguments>();
4146  while (!arguments.is_empty()) {
4147    push_args->AddInput(arguments.RemoveLast());
4148  }
4149  AddInstruction(push_args);
4150}
4151
4152
4153template <class Instruction>
4154HInstruction* HOptimizedGraphBuilder::PreProcessCall(Instruction* call) {
4155  PushArgumentsFromEnvironment(call->argument_count());
4156  return call;
4157}
4158
4159void HOptimizedGraphBuilder::SetUpScope(DeclarationScope* scope) {
4160  HEnvironment* prolog_env = environment();
4161  int parameter_count = environment()->parameter_count();
4162  ZoneList<HValue*> parameters(parameter_count, zone());
4163  for (int i = 0; i < parameter_count; ++i) {
4164    HInstruction* parameter = Add<HParameter>(static_cast<unsigned>(i));
4165    parameters.Add(parameter, zone());
4166    environment()->Bind(i, parameter);
4167  }
4168
4169  HConstant* undefined_constant = graph()->GetConstantUndefined();
4170  // Initialize specials and locals to undefined.
4171  for (int i = parameter_count + 1; i < environment()->length(); ++i) {
4172    environment()->Bind(i, undefined_constant);
4173  }
4174  Add<HPrologue>();
4175
4176  HEnvironment* initial_env = environment()->CopyWithoutHistory();
4177  HBasicBlock* body_entry = CreateBasicBlock(initial_env);
4178  GotoNoSimulate(body_entry);
4179  set_current_block(body_entry);
4180
4181  // Initialize context of prolog environment to undefined.
4182  prolog_env->BindContext(undefined_constant);
4183
4184  // First special is HContext.
4185  HInstruction* context = Add<HContext>();
4186  environment()->BindContext(context);
4187
4188  // Create an arguments object containing the initial parameters.  Set the
4189  // initial values of parameters including "this" having parameter index 0.
4190  DCHECK_EQ(scope->num_parameters() + 1, parameter_count);
4191  HArgumentsObject* arguments_object = New<HArgumentsObject>(parameter_count);
4192  for (int i = 0; i < parameter_count; ++i) {
4193    HValue* parameter = parameters.at(i);
4194    arguments_object->AddArgument(parameter, zone());
4195  }
4196
4197  AddInstruction(arguments_object);
4198
4199  // Handle the arguments and arguments shadow variables specially (they do
4200  // not have declarations).
4201  if (scope->arguments() != NULL) {
4202    environment()->Bind(scope->arguments(), arguments_object);
4203  }
4204
4205  if (scope->rest_parameter() != nullptr) {
4206    return Bailout(kRestParameter);
4207  }
4208
4209  if (scope->this_function_var() != nullptr ||
4210      scope->new_target_var() != nullptr) {
4211    return Bailout(kSuperReference);
4212  }
4213
4214  // Trace the call.
4215  if (FLAG_trace && top_info()->IsOptimizing()) {
4216    Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kTraceEnter), 0);
4217  }
4218}
4219
4220
4221void HOptimizedGraphBuilder::VisitStatements(ZoneList<Statement*>* statements) {
4222  for (int i = 0; i < statements->length(); i++) {
4223    Statement* stmt = statements->at(i);
4224    CHECK_ALIVE(Visit(stmt));
4225    if (stmt->IsJump()) break;
4226  }
4227}
4228
4229
4230void HOptimizedGraphBuilder::VisitBlock(Block* stmt) {
4231  DCHECK(!HasStackOverflow());
4232  DCHECK(current_block() != NULL);
4233  DCHECK(current_block()->HasPredecessor());
4234
4235  Scope* outer_scope = scope();
4236  Scope* scope = stmt->scope();
4237  BreakAndContinueInfo break_info(stmt, outer_scope);
4238
4239  { BreakAndContinueScope push(&break_info, this);
4240    if (scope != NULL) {
4241      if (scope->NeedsContext()) {
4242        // Load the function object.
4243        DeclarationScope* declaration_scope = scope->GetDeclarationScope();
4244        HInstruction* function;
4245        HValue* outer_context = environment()->context();
4246        if (declaration_scope->is_script_scope() ||
4247            declaration_scope->is_eval_scope()) {
4248          function = new (zone())
4249              HLoadContextSlot(outer_context, Context::CLOSURE_INDEX,
4250                               HLoadContextSlot::kNoCheck);
4251        } else {
4252          function = New<HThisFunction>();
4253        }
4254        AddInstruction(function);
4255        // Allocate a block context and store it to the stack frame.
4256        HValue* scope_info = Add<HConstant>(scope->scope_info());
4257        Add<HPushArguments>(scope_info, function);
4258        HInstruction* inner_context = Add<HCallRuntime>(
4259            Runtime::FunctionForId(Runtime::kPushBlockContext), 2);
4260        inner_context->SetFlag(HValue::kHasNoObservableSideEffects);
4261        set_scope(scope);
4262        environment()->BindContext(inner_context);
4263      }
4264      VisitDeclarations(scope->declarations());
4265      AddSimulate(stmt->DeclsId(), REMOVABLE_SIMULATE);
4266    }
4267    CHECK_BAILOUT(VisitStatements(stmt->statements()));
4268  }
4269  set_scope(outer_scope);
4270  if (scope != NULL && current_block() != NULL &&
4271      scope->ContextLocalCount() > 0) {
4272    HValue* inner_context = environment()->context();
4273    HValue* outer_context = Add<HLoadNamedField>(
4274        inner_context, nullptr,
4275        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4276
4277    environment()->BindContext(outer_context);
4278  }
4279  HBasicBlock* break_block = break_info.break_block();
4280  if (break_block != NULL) {
4281    if (current_block() != NULL) Goto(break_block);
4282    break_block->SetJoinId(stmt->ExitId());
4283    set_current_block(break_block);
4284  }
4285}
4286
4287
4288void HOptimizedGraphBuilder::VisitExpressionStatement(
4289    ExpressionStatement* stmt) {
4290  DCHECK(!HasStackOverflow());
4291  DCHECK(current_block() != NULL);
4292  DCHECK(current_block()->HasPredecessor());
4293  VisitForEffect(stmt->expression());
4294}
4295
4296
4297void HOptimizedGraphBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
4298  DCHECK(!HasStackOverflow());
4299  DCHECK(current_block() != NULL);
4300  DCHECK(current_block()->HasPredecessor());
4301}
4302
4303
4304void HOptimizedGraphBuilder::VisitSloppyBlockFunctionStatement(
4305    SloppyBlockFunctionStatement* stmt) {
4306  Visit(stmt->statement());
4307}
4308
4309
4310void HOptimizedGraphBuilder::VisitIfStatement(IfStatement* stmt) {
4311  DCHECK(!HasStackOverflow());
4312  DCHECK(current_block() != NULL);
4313  DCHECK(current_block()->HasPredecessor());
4314  if (stmt->condition()->ToBooleanIsTrue()) {
4315    Add<HSimulate>(stmt->ThenId());
4316    Visit(stmt->then_statement());
4317  } else if (stmt->condition()->ToBooleanIsFalse()) {
4318    Add<HSimulate>(stmt->ElseId());
4319    Visit(stmt->else_statement());
4320  } else {
4321    HBasicBlock* cond_true = graph()->CreateBasicBlock();
4322    HBasicBlock* cond_false = graph()->CreateBasicBlock();
4323    CHECK_BAILOUT(VisitForControl(stmt->condition(), cond_true, cond_false));
4324
4325    // Technically, we should be able to handle the case when one side of
4326    // the test is not connected, but this can trip up liveness analysis
4327    // if we did not fully connect the test context based on some optimistic
4328    // assumption. If such an assumption was violated, we would end up with
4329    // an environment with optimized-out values. So we should always
4330    // conservatively connect the test context.
4331    CHECK(cond_true->HasPredecessor());
4332    CHECK(cond_false->HasPredecessor());
4333
4334    cond_true->SetJoinId(stmt->ThenId());
4335    set_current_block(cond_true);
4336    CHECK_BAILOUT(Visit(stmt->then_statement()));
4337    cond_true = current_block();
4338
4339    cond_false->SetJoinId(stmt->ElseId());
4340    set_current_block(cond_false);
4341    CHECK_BAILOUT(Visit(stmt->else_statement()));
4342    cond_false = current_block();
4343
4344    HBasicBlock* join = CreateJoin(cond_true, cond_false, stmt->IfId());
4345    set_current_block(join);
4346  }
4347}
4348
4349
4350HBasicBlock* HOptimizedGraphBuilder::BreakAndContinueScope::Get(
4351    BreakableStatement* stmt,
4352    BreakType type,
4353    Scope** scope,
4354    int* drop_extra) {
4355  *drop_extra = 0;
4356  BreakAndContinueScope* current = this;
4357  while (current != NULL && current->info()->target() != stmt) {
4358    *drop_extra += current->info()->drop_extra();
4359    current = current->next();
4360  }
4361  DCHECK(current != NULL);  // Always found (unless stack is malformed).
4362  *scope = current->info()->scope();
4363
4364  if (type == BREAK) {
4365    *drop_extra += current->info()->drop_extra();
4366  }
4367
4368  HBasicBlock* block = NULL;
4369  switch (type) {
4370    case BREAK:
4371      block = current->info()->break_block();
4372      if (block == NULL) {
4373        block = current->owner()->graph()->CreateBasicBlock();
4374        current->info()->set_break_block(block);
4375      }
4376      break;
4377
4378    case CONTINUE:
4379      block = current->info()->continue_block();
4380      if (block == NULL) {
4381        block = current->owner()->graph()->CreateBasicBlock();
4382        current->info()->set_continue_block(block);
4383      }
4384      break;
4385  }
4386
4387  return block;
4388}
4389
4390
4391void HOptimizedGraphBuilder::VisitContinueStatement(
4392    ContinueStatement* stmt) {
4393  DCHECK(!HasStackOverflow());
4394  DCHECK(current_block() != NULL);
4395  DCHECK(current_block()->HasPredecessor());
4396
4397  if (function_state()->IsInsideDoExpressionScope()) {
4398    return Bailout(kDoExpressionUnmodelable);
4399  }
4400
4401  Scope* outer_scope = NULL;
4402  Scope* inner_scope = scope();
4403  int drop_extra = 0;
4404  HBasicBlock* continue_block = break_scope()->Get(
4405      stmt->target(), BreakAndContinueScope::CONTINUE,
4406      &outer_scope, &drop_extra);
4407  HValue* context = environment()->context();
4408  Drop(drop_extra);
4409  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4410  if (context_pop_count > 0) {
4411    while (context_pop_count-- > 0) {
4412      HInstruction* context_instruction = Add<HLoadNamedField>(
4413          context, nullptr,
4414          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4415      context = context_instruction;
4416    }
4417    environment()->BindContext(context);
4418  }
4419
4420  Goto(continue_block);
4421  set_current_block(NULL);
4422}
4423
4424
4425void HOptimizedGraphBuilder::VisitBreakStatement(BreakStatement* stmt) {
4426  DCHECK(!HasStackOverflow());
4427  DCHECK(current_block() != NULL);
4428  DCHECK(current_block()->HasPredecessor());
4429
4430  if (function_state()->IsInsideDoExpressionScope()) {
4431    return Bailout(kDoExpressionUnmodelable);
4432  }
4433
4434  Scope* outer_scope = NULL;
4435  Scope* inner_scope = scope();
4436  int drop_extra = 0;
4437  HBasicBlock* break_block = break_scope()->Get(
4438      stmt->target(), BreakAndContinueScope::BREAK,
4439      &outer_scope, &drop_extra);
4440  HValue* context = environment()->context();
4441  Drop(drop_extra);
4442  int context_pop_count = inner_scope->ContextChainLength(outer_scope);
4443  if (context_pop_count > 0) {
4444    while (context_pop_count-- > 0) {
4445      HInstruction* context_instruction = Add<HLoadNamedField>(
4446          context, nullptr,
4447          HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
4448      context = context_instruction;
4449    }
4450    environment()->BindContext(context);
4451  }
4452  Goto(break_block);
4453  set_current_block(NULL);
4454}
4455
4456
4457void HOptimizedGraphBuilder::VisitReturnStatement(ReturnStatement* stmt) {
4458  DCHECK(!HasStackOverflow());
4459  DCHECK(current_block() != NULL);
4460  DCHECK(current_block()->HasPredecessor());
4461  FunctionState* state = function_state();
4462  AstContext* context = call_context();
4463  if (context == NULL) {
4464    // Not an inlined return, so an actual one.
4465    CHECK_ALIVE(VisitForValue(stmt->expression()));
4466    HValue* result = environment()->Pop();
4467    Add<HReturn>(result);
4468  } else if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
4469    // Return from an inlined construct call. In a test context the return value
4470    // will always evaluate to true, in a value context the return value needs
4471    // to be a JSObject.
4472    if (context->IsTest()) {
4473      CHECK_ALIVE(VisitForEffect(stmt->expression()));
4474      context->ReturnValue(graph()->GetConstantTrue());
4475    } else if (context->IsEffect()) {
4476      CHECK_ALIVE(VisitForEffect(stmt->expression()));
4477      Goto(function_return(), state);
4478    } else {
4479      DCHECK(context->IsValue());
4480      CHECK_ALIVE(VisitForValue(stmt->expression()));
4481      HValue* return_value = Pop();
4482      HValue* receiver = environment()->arguments_environment()->Lookup(0);
4483      HHasInstanceTypeAndBranch* typecheck =
4484          New<HHasInstanceTypeAndBranch>(return_value,
4485                                         FIRST_JS_RECEIVER_TYPE,
4486                                         LAST_JS_RECEIVER_TYPE);
4487      HBasicBlock* if_spec_object = graph()->CreateBasicBlock();
4488      HBasicBlock* not_spec_object = graph()->CreateBasicBlock();
4489      typecheck->SetSuccessorAt(0, if_spec_object);
4490      typecheck->SetSuccessorAt(1, not_spec_object);
4491      FinishCurrentBlock(typecheck);
4492      AddLeaveInlined(if_spec_object, return_value, state);
4493      AddLeaveInlined(not_spec_object, receiver, state);
4494    }
4495  } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
4496    // Return from an inlined setter call. The returned value is never used, the
4497    // value of an assignment is always the value of the RHS of the assignment.
4498    CHECK_ALIVE(VisitForEffect(stmt->expression()));
4499    if (context->IsTest()) {
4500      HValue* rhs = environment()->arguments_environment()->Lookup(1);
4501      context->ReturnValue(rhs);
4502    } else if (context->IsEffect()) {
4503      Goto(function_return(), state);
4504    } else {
4505      DCHECK(context->IsValue());
4506      HValue* rhs = environment()->arguments_environment()->Lookup(1);
4507      AddLeaveInlined(rhs, state);
4508    }
4509  } else {
4510    // Return from a normal inlined function. Visit the subexpression in the
4511    // expression context of the call.
4512    if (context->IsTest()) {
4513      TestContext* test = TestContext::cast(context);
4514      VisitForControl(stmt->expression(), test->if_true(), test->if_false());
4515    } else if (context->IsEffect()) {
4516      // Visit in value context and ignore the result. This is needed to keep
4517      // environment in sync with full-codegen since some visitors (e.g.
4518      // VisitCountOperation) use the operand stack differently depending on
4519      // context.
4520      CHECK_ALIVE(VisitForValue(stmt->expression()));
4521      Pop();
4522      Goto(function_return(), state);
4523    } else {
4524      DCHECK(context->IsValue());
4525      CHECK_ALIVE(VisitForValue(stmt->expression()));
4526      AddLeaveInlined(Pop(), state);
4527    }
4528  }
4529  set_current_block(NULL);
4530}
4531
4532
4533void HOptimizedGraphBuilder::VisitWithStatement(WithStatement* stmt) {
4534  DCHECK(!HasStackOverflow());
4535  DCHECK(current_block() != NULL);
4536  DCHECK(current_block()->HasPredecessor());
4537  return Bailout(kWithStatement);
4538}
4539
4540
4541void HOptimizedGraphBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
4542  DCHECK(!HasStackOverflow());
4543  DCHECK(current_block() != NULL);
4544  DCHECK(current_block()->HasPredecessor());
4545
4546  ZoneList<CaseClause*>* clauses = stmt->cases();
4547  int clause_count = clauses->length();
4548  ZoneList<HBasicBlock*> body_blocks(clause_count, zone());
4549
4550  CHECK_ALIVE(VisitForValue(stmt->tag()));
4551  Add<HSimulate>(stmt->EntryId());
4552  HValue* tag_value = Top();
4553  AstType* tag_type = bounds_.get(stmt->tag()).lower;
4554
4555  // 1. Build all the tests, with dangling true branches
4556  BailoutId default_id = BailoutId::None();
4557  for (int i = 0; i < clause_count; ++i) {
4558    CaseClause* clause = clauses->at(i);
4559    if (clause->is_default()) {
4560      body_blocks.Add(NULL, zone());
4561      if (default_id.IsNone()) default_id = clause->EntryId();
4562      continue;
4563    }
4564
4565    // Generate a compare and branch.
4566    CHECK_BAILOUT(VisitForValue(clause->label()));
4567    if (current_block() == NULL) return Bailout(kUnsupportedSwitchStatement);
4568    HValue* label_value = Pop();
4569
4570    AstType* label_type = bounds_.get(clause->label()).lower;
4571    AstType* combined_type = clause->compare_type();
4572    HControlInstruction* compare = BuildCompareInstruction(
4573        Token::EQ_STRICT, tag_value, label_value, tag_type, label_type,
4574        combined_type,
4575        ScriptPositionToSourcePosition(stmt->tag()->position()),
4576        ScriptPositionToSourcePosition(clause->label()->position()),
4577        PUSH_BEFORE_SIMULATE, clause->id());
4578
4579    HBasicBlock* next_test_block = graph()->CreateBasicBlock();
4580    HBasicBlock* body_block = graph()->CreateBasicBlock();
4581    body_blocks.Add(body_block, zone());
4582    compare->SetSuccessorAt(0, body_block);
4583    compare->SetSuccessorAt(1, next_test_block);
4584    FinishCurrentBlock(compare);
4585
4586    set_current_block(body_block);
4587    Drop(1);  // tag_value
4588
4589    set_current_block(next_test_block);
4590  }
4591
4592  // Save the current block to use for the default or to join with the
4593  // exit.
4594  HBasicBlock* last_block = current_block();
4595  Drop(1);  // tag_value
4596
4597  // 2. Loop over the clauses and the linked list of tests in lockstep,
4598  // translating the clause bodies.
4599  HBasicBlock* fall_through_block = NULL;
4600
4601  BreakAndContinueInfo break_info(stmt, scope());
4602  { BreakAndContinueScope push(&break_info, this);
4603    for (int i = 0; i < clause_count; ++i) {
4604      CaseClause* clause = clauses->at(i);
4605
4606      // Identify the block where normal (non-fall-through) control flow
4607      // goes to.
4608      HBasicBlock* normal_block = NULL;
4609      if (clause->is_default()) {
4610        if (last_block == NULL) continue;
4611        normal_block = last_block;
4612        last_block = NULL;  // Cleared to indicate we've handled it.
4613      } else {
4614        normal_block = body_blocks[i];
4615      }
4616
4617      if (fall_through_block == NULL) {
4618        set_current_block(normal_block);
4619      } else {
4620        HBasicBlock* join = CreateJoin(fall_through_block,
4621                                       normal_block,
4622                                       clause->EntryId());
4623        set_current_block(join);
4624      }
4625
4626      CHECK_BAILOUT(VisitStatements(clause->statements()));
4627      fall_through_block = current_block();
4628    }
4629  }
4630
4631  // Create an up-to-3-way join.  Use the break block if it exists since
4632  // it's already a join block.
4633  HBasicBlock* break_block = break_info.break_block();
4634  if (break_block == NULL) {
4635    set_current_block(CreateJoin(fall_through_block,
4636                                 last_block,
4637                                 stmt->ExitId()));
4638  } else {
4639    if (fall_through_block != NULL) Goto(fall_through_block, break_block);
4640    if (last_block != NULL) Goto(last_block, break_block);
4641    break_block->SetJoinId(stmt->ExitId());
4642    set_current_block(break_block);
4643  }
4644}
4645
4646void HOptimizedGraphBuilder::VisitLoopBody(IterationStatement* stmt,
4647                                           BailoutId stack_check_id,
4648                                           HBasicBlock* loop_entry) {
4649  Add<HSimulate>(stack_check_id);
4650  HStackCheck* stack_check =
4651      HStackCheck::cast(Add<HStackCheck>(HStackCheck::kBackwardsBranch));
4652  DCHECK(loop_entry->IsLoopHeader());
4653  loop_entry->loop_information()->set_stack_check(stack_check);
4654  CHECK_BAILOUT(Visit(stmt->body()));
4655}
4656
4657
4658void HOptimizedGraphBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
4659  DCHECK(!HasStackOverflow());
4660  DCHECK(current_block() != NULL);
4661  DCHECK(current_block()->HasPredecessor());
4662  DCHECK(current_block() != NULL);
4663  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4664
4665  BreakAndContinueInfo break_info(stmt, scope());
4666  {
4667    BreakAndContinueScope push(&break_info, this);
4668    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
4669  }
4670  HBasicBlock* body_exit = JoinContinue(
4671      stmt, stmt->ContinueId(), current_block(), break_info.continue_block());
4672  HBasicBlock* loop_successor = NULL;
4673  if (body_exit != NULL) {
4674    set_current_block(body_exit);
4675    loop_successor = graph()->CreateBasicBlock();
4676    if (stmt->cond()->ToBooleanIsFalse()) {
4677      loop_entry->loop_information()->stack_check()->Eliminate();
4678      Goto(loop_successor);
4679      body_exit = NULL;
4680    } else {
4681      // The block for a true condition, the actual predecessor block of the
4682      // back edge.
4683      body_exit = graph()->CreateBasicBlock();
4684      CHECK_BAILOUT(VisitForControl(stmt->cond(), body_exit, loop_successor));
4685    }
4686    if (body_exit != NULL && body_exit->HasPredecessor()) {
4687      body_exit->SetJoinId(stmt->BackEdgeId());
4688    } else {
4689      body_exit = NULL;
4690    }
4691    if (loop_successor->HasPredecessor()) {
4692      loop_successor->SetJoinId(stmt->ExitId());
4693    } else {
4694      loop_successor = NULL;
4695    }
4696  }
4697  HBasicBlock* loop_exit = CreateLoop(stmt,
4698                                      loop_entry,
4699                                      body_exit,
4700                                      loop_successor,
4701                                      break_info.break_block());
4702  set_current_block(loop_exit);
4703}
4704
4705
4706void HOptimizedGraphBuilder::VisitWhileStatement(WhileStatement* stmt) {
4707  DCHECK(!HasStackOverflow());
4708  DCHECK(current_block() != NULL);
4709  DCHECK(current_block()->HasPredecessor());
4710  DCHECK(current_block() != NULL);
4711  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4712
4713  // If the condition is constant true, do not generate a branch.
4714  HBasicBlock* loop_successor = NULL;
4715  HBasicBlock* body_entry = graph()->CreateBasicBlock();
4716  loop_successor = graph()->CreateBasicBlock();
4717  CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4718  if (body_entry->HasPredecessor()) {
4719    body_entry->SetJoinId(stmt->BodyId());
4720    set_current_block(body_entry);
4721  }
4722  if (loop_successor->HasPredecessor()) {
4723    loop_successor->SetJoinId(stmt->ExitId());
4724  } else {
4725    loop_successor = NULL;
4726  }
4727
4728  BreakAndContinueInfo break_info(stmt, scope());
4729  if (current_block() != NULL) {
4730    BreakAndContinueScope push(&break_info, this);
4731    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
4732  }
4733  HBasicBlock* body_exit = JoinContinue(
4734      stmt, stmt->ContinueId(), current_block(), break_info.continue_block());
4735  HBasicBlock* loop_exit = CreateLoop(stmt,
4736                                      loop_entry,
4737                                      body_exit,
4738                                      loop_successor,
4739                                      break_info.break_block());
4740  set_current_block(loop_exit);
4741}
4742
4743
4744void HOptimizedGraphBuilder::VisitForStatement(ForStatement* stmt) {
4745  DCHECK(!HasStackOverflow());
4746  DCHECK(current_block() != NULL);
4747  DCHECK(current_block()->HasPredecessor());
4748  if (stmt->init() != NULL) {
4749    CHECK_ALIVE(Visit(stmt->init()));
4750  }
4751  DCHECK(current_block() != NULL);
4752  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4753
4754  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
4755  HBasicBlock* body_entry = graph()->CreateBasicBlock();
4756  if (stmt->cond() != NULL) {
4757    CHECK_BAILOUT(VisitForControl(stmt->cond(), body_entry, loop_successor));
4758    if (body_entry->HasPredecessor()) {
4759      body_entry->SetJoinId(stmt->BodyId());
4760      set_current_block(body_entry);
4761    }
4762    if (loop_successor->HasPredecessor()) {
4763      loop_successor->SetJoinId(stmt->ExitId());
4764    } else {
4765      loop_successor = NULL;
4766    }
4767  } else {
4768    // Create dummy control flow so that variable liveness analysis
4769    // produces teh correct result.
4770    HControlInstruction* branch = New<HBranch>(graph()->GetConstantTrue());
4771    branch->SetSuccessorAt(0, body_entry);
4772    branch->SetSuccessorAt(1, loop_successor);
4773    FinishCurrentBlock(branch);
4774    set_current_block(body_entry);
4775  }
4776
4777  BreakAndContinueInfo break_info(stmt, scope());
4778  if (current_block() != NULL) {
4779    BreakAndContinueScope push(&break_info, this);
4780    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
4781  }
4782  HBasicBlock* body_exit = JoinContinue(
4783      stmt, stmt->ContinueId(), current_block(), break_info.continue_block());
4784
4785  if (stmt->next() != NULL && body_exit != NULL) {
4786    set_current_block(body_exit);
4787    CHECK_BAILOUT(Visit(stmt->next()));
4788    body_exit = current_block();
4789  }
4790
4791  HBasicBlock* loop_exit = CreateLoop(stmt,
4792                                      loop_entry,
4793                                      body_exit,
4794                                      loop_successor,
4795                                      break_info.break_block());
4796  set_current_block(loop_exit);
4797}
4798
4799
4800void HOptimizedGraphBuilder::VisitForInStatement(ForInStatement* stmt) {
4801  DCHECK(!HasStackOverflow());
4802  DCHECK(current_block() != NULL);
4803  DCHECK(current_block()->HasPredecessor());
4804
4805  if (!stmt->each()->IsVariableProxy() ||
4806      !stmt->each()->AsVariableProxy()->var()->IsStackLocal()) {
4807    return Bailout(kForInStatementWithNonLocalEachVariable);
4808  }
4809
4810  Variable* each_var = stmt->each()->AsVariableProxy()->var();
4811
4812  CHECK_ALIVE(VisitForValue(stmt->enumerable()));
4813  HValue* enumerable = Top();  // Leave enumerable at the top.
4814
4815  IfBuilder if_undefined_or_null(this);
4816  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
4817      enumerable, graph()->GetConstantUndefined());
4818  if_undefined_or_null.Or();
4819  if_undefined_or_null.If<HCompareObjectEqAndBranch>(
4820      enumerable, graph()->GetConstantNull());
4821  if_undefined_or_null.ThenDeopt(DeoptimizeReason::kUndefinedOrNullInForIn);
4822  if_undefined_or_null.End();
4823  BuildForInBody(stmt, each_var, enumerable);
4824}
4825
4826
4827void HOptimizedGraphBuilder::BuildForInBody(ForInStatement* stmt,
4828                                            Variable* each_var,
4829                                            HValue* enumerable) {
4830  Handle<Map> meta_map = isolate()->factory()->meta_map();
4831  bool fast = stmt->for_in_type() == ForInStatement::FAST_FOR_IN;
4832  BuildCheckHeapObject(enumerable);
4833  Add<HCheckInstanceType>(enumerable, HCheckInstanceType::IS_JS_RECEIVER);
4834  Add<HSimulate>(stmt->ToObjectId());
4835  if (fast) {
4836    HForInPrepareMap* map = Add<HForInPrepareMap>(enumerable);
4837    Push(map);
4838    Add<HSimulate>(stmt->EnumId());
4839    Drop(1);
4840    Add<HCheckMaps>(map, meta_map);
4841
4842    HForInCacheArray* array = Add<HForInCacheArray>(
4843        enumerable, map, DescriptorArray::kEnumCacheBridgeCacheIndex);
4844    HValue* enum_length = BuildEnumLength(map);
4845
4846    HForInCacheArray* index_cache = Add<HForInCacheArray>(
4847        enumerable, map, DescriptorArray::kEnumCacheBridgeIndicesCacheIndex);
4848    array->set_index_cache(index_cache);
4849
4850    Push(map);
4851    Push(array);
4852    Push(enum_length);
4853    Add<HSimulate>(stmt->PrepareId());
4854  } else {
4855    Runtime::FunctionId function_id = Runtime::kForInEnumerate;
4856    Add<HPushArguments>(enumerable);
4857    HCallRuntime* array =
4858        Add<HCallRuntime>(Runtime::FunctionForId(function_id), 1);
4859    Push(array);
4860    Add<HSimulate>(stmt->EnumId());
4861    Drop(1);
4862
4863    IfBuilder if_fast(this);
4864    if_fast.If<HCompareMap>(array, meta_map);
4865    if_fast.Then();
4866    {
4867      HValue* cache_map = array;
4868      HForInCacheArray* cache = Add<HForInCacheArray>(
4869          enumerable, cache_map, DescriptorArray::kEnumCacheBridgeCacheIndex);
4870      HValue* enum_length = BuildEnumLength(cache_map);
4871      Push(cache_map);
4872      Push(cache);
4873      Push(enum_length);
4874      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
4875    }
4876    if_fast.Else();
4877    {
4878      Push(graph()->GetConstant1());
4879      Push(array);
4880      Push(AddLoadFixedArrayLength(array));
4881      Add<HSimulate>(stmt->PrepareId(), FIXED_SIMULATE);
4882    }
4883  }
4884
4885  Push(graph()->GetConstant0());
4886
4887  HBasicBlock* loop_entry = BuildLoopEntry(stmt);
4888
4889  // Reload the values to ensure we have up-to-date values inside of the loop.
4890  // This is relevant especially for OSR where the values don't come from the
4891  // computation above, but from the OSR entry block.
4892  HValue* index = environment()->ExpressionStackAt(0);
4893  HValue* limit = environment()->ExpressionStackAt(1);
4894  HValue* array = environment()->ExpressionStackAt(2);
4895  HValue* type = environment()->ExpressionStackAt(3);
4896  enumerable = environment()->ExpressionStackAt(4);
4897
4898  // Check that we still have more keys.
4899  HCompareNumericAndBranch* compare_index =
4900      New<HCompareNumericAndBranch>(index, limit, Token::LT);
4901  compare_index->set_observed_input_representation(
4902      Representation::Smi(), Representation::Smi());
4903
4904  HBasicBlock* loop_body = graph()->CreateBasicBlock();
4905  HBasicBlock* loop_successor = graph()->CreateBasicBlock();
4906
4907  compare_index->SetSuccessorAt(0, loop_body);
4908  compare_index->SetSuccessorAt(1, loop_successor);
4909  FinishCurrentBlock(compare_index);
4910
4911  set_current_block(loop_successor);
4912  Drop(5);
4913
4914  set_current_block(loop_body);
4915
4916  // Compute the next enumerated value.
4917  HValue* key = Add<HLoadKeyed>(array, index, index, nullptr, FAST_ELEMENTS);
4918
4919  HBasicBlock* continue_block = nullptr;
4920  if (fast) {
4921    // Check if expected map still matches that of the enumerable.
4922    Add<HCheckMapValue>(enumerable, type);
4923    Add<HSimulate>(stmt->FilterId());
4924  } else {
4925    // We need the continue block here to be able to skip over invalidated keys.
4926    continue_block = graph()->CreateBasicBlock();
4927
4928    // We cannot use the IfBuilder here, since we need to be able to jump
4929    // over the loop body in case of undefined result from %ForInFilter,
4930    // and the poor soul that is the IfBuilder get's really confused about
4931    // such "advanced control flow requirements".
4932    HBasicBlock* if_fast = graph()->CreateBasicBlock();
4933    HBasicBlock* if_slow = graph()->CreateBasicBlock();
4934    HBasicBlock* if_slow_pass = graph()->CreateBasicBlock();
4935    HBasicBlock* if_slow_skip = graph()->CreateBasicBlock();
4936    HBasicBlock* if_join = graph()->CreateBasicBlock();
4937
4938    // Check if expected map still matches that of the enumerable.
4939    HValue* enumerable_map =
4940        Add<HLoadNamedField>(enumerable, nullptr, HObjectAccess::ForMap());
4941    FinishCurrentBlock(
4942        New<HCompareObjectEqAndBranch>(enumerable_map, type, if_fast, if_slow));
4943    set_current_block(if_fast);
4944    {
4945      // The enum cache for enumerable is still valid, no need to check key.
4946      Push(key);
4947      Goto(if_join);
4948    }
4949    set_current_block(if_slow);
4950    {
4951      Callable callable = CodeFactory::ForInFilter(isolate());
4952      HValue* values[] = {key, enumerable};
4953      HConstant* stub_value = Add<HConstant>(callable.code());
4954      Push(Add<HCallWithDescriptor>(stub_value, 0, callable.descriptor(),
4955                                    ArrayVector(values)));
4956      Add<HSimulate>(stmt->FilterId());
4957      FinishCurrentBlock(New<HCompareObjectEqAndBranch>(
4958          Top(), graph()->GetConstantUndefined(), if_slow_skip, if_slow_pass));
4959    }
4960    set_current_block(if_slow_pass);
4961    { Goto(if_join); }
4962    set_current_block(if_slow_skip);
4963    {
4964      // The key is no longer valid for enumerable, skip it.
4965      Drop(1);
4966      Goto(continue_block);
4967    }
4968    if_join->SetJoinId(stmt->FilterId());
4969    set_current_block(if_join);
4970    key = Pop();
4971  }
4972
4973  Bind(each_var, key);
4974  Add<HSimulate>(stmt->AssignmentId());
4975
4976  BreakAndContinueInfo break_info(stmt, scope(), 5);
4977  break_info.set_continue_block(continue_block);
4978  {
4979    BreakAndContinueScope push(&break_info, this);
4980    CHECK_BAILOUT(VisitLoopBody(stmt, stmt->StackCheckId(), loop_entry));
4981  }
4982
4983  HBasicBlock* body_exit = JoinContinue(
4984      stmt, stmt->IncrementId(), current_block(), break_info.continue_block());
4985
4986  if (body_exit != NULL) {
4987    set_current_block(body_exit);
4988
4989    HValue* current_index = Pop();
4990    HValue* increment =
4991        AddUncasted<HAdd>(current_index, graph()->GetConstant1());
4992    increment->ClearFlag(HValue::kCanOverflow);
4993    Push(increment);
4994    body_exit = current_block();
4995  }
4996
4997  HBasicBlock* loop_exit = CreateLoop(stmt,
4998                                      loop_entry,
4999                                      body_exit,
5000                                      loop_successor,
5001                                      break_info.break_block());
5002
5003  set_current_block(loop_exit);
5004}
5005
5006
5007void HOptimizedGraphBuilder::VisitForOfStatement(ForOfStatement* stmt) {
5008  DCHECK(!HasStackOverflow());
5009  DCHECK(current_block() != NULL);
5010  DCHECK(current_block()->HasPredecessor());
5011  return Bailout(kForOfStatement);
5012}
5013
5014
5015void HOptimizedGraphBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
5016  DCHECK(!HasStackOverflow());
5017  DCHECK(current_block() != NULL);
5018  DCHECK(current_block()->HasPredecessor());
5019  return Bailout(kTryCatchStatement);
5020}
5021
5022
5023void HOptimizedGraphBuilder::VisitTryFinallyStatement(
5024    TryFinallyStatement* stmt) {
5025  DCHECK(!HasStackOverflow());
5026  DCHECK(current_block() != NULL);
5027  DCHECK(current_block()->HasPredecessor());
5028  return Bailout(kTryFinallyStatement);
5029}
5030
5031
5032void HOptimizedGraphBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
5033  DCHECK(!HasStackOverflow());
5034  DCHECK(current_block() != NULL);
5035  DCHECK(current_block()->HasPredecessor());
5036  return Bailout(kDebuggerStatement);
5037}
5038
5039
5040void HOptimizedGraphBuilder::VisitCaseClause(CaseClause* clause) {
5041  UNREACHABLE();
5042}
5043
5044
5045void HOptimizedGraphBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
5046  DCHECK(!HasStackOverflow());
5047  DCHECK(current_block() != NULL);
5048  DCHECK(current_block()->HasPredecessor());
5049  Handle<SharedFunctionInfo> shared_info = Compiler::GetSharedFunctionInfo(
5050      expr, current_info()->script(), top_info());
5051  // We also have a stack overflow if the recursive compilation did.
5052  if (HasStackOverflow()) return;
5053  // Use the fast case closure allocation code that allocates in new
5054  // space for nested functions that don't need pretenuring.
5055  HConstant* shared_info_value = Add<HConstant>(shared_info);
5056  HInstruction* instr;
5057  Handle<FeedbackVector> vector(current_feedback_vector(), isolate());
5058  HValue* vector_value = Add<HConstant>(vector);
5059  int index = FeedbackVector::GetIndex(expr->LiteralFeedbackSlot());
5060  HValue* index_value = Add<HConstant>(index);
5061  if (!expr->pretenure()) {
5062    Callable callable = CodeFactory::FastNewClosure(isolate());
5063    HValue* values[] = {shared_info_value, vector_value, index_value};
5064    HConstant* stub_value = Add<HConstant>(callable.code());
5065    instr = New<HCallWithDescriptor>(stub_value, 0, callable.descriptor(),
5066                                     ArrayVector(values));
5067  } else {
5068    Add<HPushArguments>(shared_info_value);
5069    Add<HPushArguments>(vector_value);
5070    Add<HPushArguments>(index_value);
5071    Runtime::FunctionId function_id =
5072        expr->pretenure() ? Runtime::kNewClosure_Tenured : Runtime::kNewClosure;
5073    instr = New<HCallRuntime>(Runtime::FunctionForId(function_id), 3);
5074  }
5075  return ast_context()->ReturnInstruction(instr, expr->id());
5076}
5077
5078
5079void HOptimizedGraphBuilder::VisitClassLiteral(ClassLiteral* lit) {
5080  DCHECK(!HasStackOverflow());
5081  DCHECK(current_block() != NULL);
5082  DCHECK(current_block()->HasPredecessor());
5083  return Bailout(kClassLiteral);
5084}
5085
5086
5087void HOptimizedGraphBuilder::VisitNativeFunctionLiteral(
5088    NativeFunctionLiteral* expr) {
5089  DCHECK(!HasStackOverflow());
5090  DCHECK(current_block() != NULL);
5091  DCHECK(current_block()->HasPredecessor());
5092  return Bailout(kNativeFunctionLiteral);
5093}
5094
5095
5096void HOptimizedGraphBuilder::VisitDoExpression(DoExpression* expr) {
5097  DoExpressionScope scope(this);
5098  DCHECK(!HasStackOverflow());
5099  DCHECK(current_block() != NULL);
5100  DCHECK(current_block()->HasPredecessor());
5101  CHECK_ALIVE(VisitBlock(expr->block()));
5102  Visit(expr->result());
5103}
5104
5105
5106void HOptimizedGraphBuilder::VisitConditional(Conditional* expr) {
5107  DCHECK(!HasStackOverflow());
5108  DCHECK(current_block() != NULL);
5109  DCHECK(current_block()->HasPredecessor());
5110  HBasicBlock* cond_true = graph()->CreateBasicBlock();
5111  HBasicBlock* cond_false = graph()->CreateBasicBlock();
5112  CHECK_BAILOUT(VisitForControl(expr->condition(), cond_true, cond_false));
5113
5114  // Visit the true and false subexpressions in the same AST context as the
5115  // whole expression.
5116  if (cond_true->HasPredecessor()) {
5117    cond_true->SetJoinId(expr->ThenId());
5118    set_current_block(cond_true);
5119    CHECK_BAILOUT(Visit(expr->then_expression()));
5120    cond_true = current_block();
5121  } else {
5122    cond_true = NULL;
5123  }
5124
5125  if (cond_false->HasPredecessor()) {
5126    cond_false->SetJoinId(expr->ElseId());
5127    set_current_block(cond_false);
5128    CHECK_BAILOUT(Visit(expr->else_expression()));
5129    cond_false = current_block();
5130  } else {
5131    cond_false = NULL;
5132  }
5133
5134  if (!ast_context()->IsTest()) {
5135    HBasicBlock* join = CreateJoin(cond_true, cond_false, expr->id());
5136    set_current_block(join);
5137    if (join != NULL && !ast_context()->IsEffect()) {
5138      return ast_context()->ReturnValue(Pop());
5139    }
5140  }
5141}
5142
5143bool HOptimizedGraphBuilder::CanInlineGlobalPropertyAccess(
5144    Variable* var, LookupIterator* it, PropertyAccessType access_type) {
5145  if (var->is_this()) return false;
5146  return CanInlineGlobalPropertyAccess(it, access_type);
5147}
5148
5149bool HOptimizedGraphBuilder::CanInlineGlobalPropertyAccess(
5150    LookupIterator* it, PropertyAccessType access_type) {
5151  if (!current_info()->has_global_object()) {
5152    return false;
5153  }
5154
5155  switch (it->state()) {
5156    case LookupIterator::ACCESSOR:
5157    case LookupIterator::ACCESS_CHECK:
5158    case LookupIterator::INTERCEPTOR:
5159    case LookupIterator::INTEGER_INDEXED_EXOTIC:
5160    case LookupIterator::NOT_FOUND:
5161      return false;
5162    case LookupIterator::DATA:
5163      if (access_type == STORE && it->IsReadOnly()) return false;
5164      if (!it->GetHolder<JSObject>()->IsJSGlobalObject()) return false;
5165      return true;
5166    case LookupIterator::JSPROXY:
5167    case LookupIterator::TRANSITION:
5168      UNREACHABLE();
5169  }
5170  UNREACHABLE();
5171  return false;
5172}
5173
5174
5175HValue* HOptimizedGraphBuilder::BuildContextChainWalk(Variable* var) {
5176  DCHECK(var->IsContextSlot());
5177  HValue* context = environment()->context();
5178  int length = scope()->ContextChainLength(var->scope());
5179  while (length-- > 0) {
5180    context = Add<HLoadNamedField>(
5181        context, nullptr,
5182        HObjectAccess::ForContextSlot(Context::PREVIOUS_INDEX));
5183  }
5184  return context;
5185}
5186
5187void HOptimizedGraphBuilder::InlineGlobalPropertyLoad(LookupIterator* it,
5188                                                      BailoutId ast_id) {
5189  Handle<PropertyCell> cell = it->GetPropertyCell();
5190  top_info()->dependencies()->AssumePropertyCell(cell);
5191  auto cell_type = it->property_details().cell_type();
5192  if (cell_type == PropertyCellType::kConstant ||
5193      cell_type == PropertyCellType::kUndefined) {
5194    Handle<Object> constant_object(cell->value(), isolate());
5195    if (constant_object->IsConsString()) {
5196      constant_object = String::Flatten(Handle<String>::cast(constant_object));
5197    }
5198    HConstant* constant = New<HConstant>(constant_object);
5199    return ast_context()->ReturnInstruction(constant, ast_id);
5200  } else {
5201    auto access = HObjectAccess::ForPropertyCellValue();
5202    UniqueSet<Map>* field_maps = nullptr;
5203    if (cell_type == PropertyCellType::kConstantType) {
5204      switch (cell->GetConstantType()) {
5205        case PropertyCellConstantType::kSmi:
5206          access = access.WithRepresentation(Representation::Smi());
5207          break;
5208        case PropertyCellConstantType::kStableMap: {
5209          // Check that the map really is stable. The heap object could
5210          // have mutated without the cell updating state. In that case,
5211          // make no promises about the loaded value except that it's a
5212          // heap object.
5213          access = access.WithRepresentation(Representation::HeapObject());
5214          Handle<Map> map(HeapObject::cast(cell->value())->map());
5215          if (map->is_stable()) {
5216            field_maps = new (zone())
5217                UniqueSet<Map>(Unique<Map>::CreateImmovable(map), zone());
5218          }
5219          break;
5220        }
5221      }
5222    }
5223    HConstant* cell_constant = Add<HConstant>(cell);
5224    HLoadNamedField* instr;
5225    if (field_maps == nullptr) {
5226      instr = New<HLoadNamedField>(cell_constant, nullptr, access);
5227    } else {
5228      instr = New<HLoadNamedField>(cell_constant, nullptr, access, field_maps,
5229                                   HType::HeapObject());
5230    }
5231    instr->ClearDependsOnFlag(kInobjectFields);
5232    instr->SetDependsOnFlag(kGlobalVars);
5233    return ast_context()->ReturnInstruction(instr, ast_id);
5234  }
5235}
5236
5237void HOptimizedGraphBuilder::VisitVariableProxy(VariableProxy* expr) {
5238  DCHECK(!HasStackOverflow());
5239  DCHECK(current_block() != NULL);
5240  DCHECK(current_block()->HasPredecessor());
5241  Variable* variable = expr->var();
5242  switch (variable->location()) {
5243    case VariableLocation::UNALLOCATED: {
5244      if (IsLexicalVariableMode(variable->mode())) {
5245        // TODO(rossberg): should this be an DCHECK?
5246        return Bailout(kReferenceToGlobalLexicalVariable);
5247      }
5248      // Handle known global constants like 'undefined' specially to avoid a
5249      // load from a global cell for them.
5250      Handle<Object> constant_value =
5251          isolate()->factory()->GlobalConstantFor(variable->name());
5252      if (!constant_value.is_null()) {
5253        HConstant* instr = New<HConstant>(constant_value);
5254        return ast_context()->ReturnInstruction(instr, expr->id());
5255      }
5256
5257      Handle<JSGlobalObject> global(current_info()->global_object());
5258
5259      // Lookup in script contexts.
5260      {
5261        Handle<ScriptContextTable> script_contexts(
5262            global->native_context()->script_context_table());
5263        ScriptContextTable::LookupResult lookup;
5264        if (ScriptContextTable::Lookup(script_contexts, variable->name(),
5265                                       &lookup)) {
5266          Handle<Context> script_context = ScriptContextTable::GetContext(
5267              script_contexts, lookup.context_index);
5268          Handle<Object> current_value =
5269              FixedArray::get(*script_context, lookup.slot_index, isolate());
5270
5271          // If the values is not the hole, it will stay initialized,
5272          // so no need to generate a check.
5273          if (current_value->IsTheHole(isolate())) {
5274            return Bailout(kReferenceToUninitializedVariable);
5275          }
5276          HInstruction* result = New<HLoadNamedField>(
5277              Add<HConstant>(script_context), nullptr,
5278              HObjectAccess::ForContextSlot(lookup.slot_index));
5279          return ast_context()->ReturnInstruction(result, expr->id());
5280        }
5281      }
5282
5283      LookupIterator it(global, variable->name(), LookupIterator::OWN);
5284      it.TryLookupCachedProperty();
5285      if (CanInlineGlobalPropertyAccess(variable, &it, LOAD)) {
5286        InlineGlobalPropertyLoad(&it, expr->id());
5287        return;
5288      } else {
5289        Handle<FeedbackVector> vector(current_feedback_vector(), isolate());
5290        FeedbackSlot slot = expr->VariableFeedbackSlot();
5291        DCHECK(vector->IsLoadGlobalIC(slot));
5292
5293        HValue* vector_value = Add<HConstant>(vector);
5294        HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
5295        Callable callable = CodeFactory::LoadGlobalICInOptimizedCode(
5296            isolate(), ast_context()->typeof_mode());
5297        HValue* stub = Add<HConstant>(callable.code());
5298        HValue* name = Add<HConstant>(variable->name());
5299        HValue* values[] = {name, slot_value, vector_value};
5300        HCallWithDescriptor* instr = New<HCallWithDescriptor>(
5301            Code::LOAD_GLOBAL_IC, stub, 0, callable.descriptor(),
5302            ArrayVector(values));
5303        return ast_context()->ReturnInstruction(instr, expr->id());
5304      }
5305    }
5306
5307    case VariableLocation::PARAMETER:
5308    case VariableLocation::LOCAL: {
5309      HValue* value = LookupAndMakeLive(variable);
5310      if (value == graph()->GetConstantHole()) {
5311        DCHECK(IsDeclaredVariableMode(variable->mode()) &&
5312               variable->mode() != VAR);
5313        return Bailout(kReferenceToUninitializedVariable);
5314      }
5315      return ast_context()->ReturnValue(value);
5316    }
5317
5318    case VariableLocation::CONTEXT: {
5319      HValue* context = BuildContextChainWalk(variable);
5320      HLoadContextSlot::Mode mode;
5321      switch (variable->mode()) {
5322        case LET:
5323        case CONST:
5324          mode = HLoadContextSlot::kCheckDeoptimize;
5325          break;
5326        default:
5327          mode = HLoadContextSlot::kNoCheck;
5328          break;
5329      }
5330      HLoadContextSlot* instr =
5331          new(zone()) HLoadContextSlot(context, variable->index(), mode);
5332      return ast_context()->ReturnInstruction(instr, expr->id());
5333    }
5334
5335    case VariableLocation::LOOKUP:
5336      return Bailout(kReferenceToAVariableWhichRequiresDynamicLookup);
5337
5338    case VariableLocation::MODULE:
5339      UNREACHABLE();
5340  }
5341}
5342
5343
5344void HOptimizedGraphBuilder::VisitLiteral(Literal* expr) {
5345  DCHECK(!HasStackOverflow());
5346  DCHECK(current_block() != NULL);
5347  DCHECK(current_block()->HasPredecessor());
5348  HConstant* instr = New<HConstant>(expr->value());
5349  return ast_context()->ReturnInstruction(instr, expr->id());
5350}
5351
5352
5353void HOptimizedGraphBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
5354  DCHECK(!HasStackOverflow());
5355  DCHECK(current_block() != NULL);
5356  DCHECK(current_block()->HasPredecessor());
5357  Callable callable = CodeFactory::FastCloneRegExp(isolate());
5358  int index = FeedbackVector::GetIndex(expr->literal_slot());
5359  HValue* values[] = {AddThisFunction(), Add<HConstant>(index),
5360                      Add<HConstant>(expr->pattern()),
5361                      Add<HConstant>(expr->flags())};
5362  HConstant* stub_value = Add<HConstant>(callable.code());
5363  HInstruction* instr = New<HCallWithDescriptor>(
5364      stub_value, 0, callable.descriptor(), ArrayVector(values));
5365  return ast_context()->ReturnInstruction(instr, expr->id());
5366}
5367
5368
5369static bool CanInlinePropertyAccess(Handle<Map> map) {
5370  if (map->instance_type() == HEAP_NUMBER_TYPE) return true;
5371  if (map->instance_type() < FIRST_NONSTRING_TYPE) return true;
5372  return map->IsJSObjectMap() && !map->is_dictionary_map() &&
5373         !map->has_named_interceptor() &&
5374         // TODO(verwaest): Whitelist contexts to which we have access.
5375         !map->is_access_check_needed();
5376}
5377
5378
5379// Determines whether the given array or object literal boilerplate satisfies
5380// all limits to be considered for fast deep-copying and computes the total
5381// size of all objects that are part of the graph.
5382static bool IsFastLiteral(Handle<JSObject> boilerplate,
5383                          int max_depth,
5384                          int* max_properties) {
5385  if (boilerplate->map()->is_deprecated() &&
5386      !JSObject::TryMigrateInstance(boilerplate)) {
5387    return false;
5388  }
5389
5390  DCHECK(max_depth >= 0 && *max_properties >= 0);
5391  if (max_depth == 0) return false;
5392
5393  Isolate* isolate = boilerplate->GetIsolate();
5394  Handle<FixedArrayBase> elements(boilerplate->elements());
5395  if (elements->length() > 0 &&
5396      elements->map() != isolate->heap()->fixed_cow_array_map()) {
5397    if (boilerplate->HasFastSmiOrObjectElements()) {
5398      Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
5399      int length = elements->length();
5400      for (int i = 0; i < length; i++) {
5401        if ((*max_properties)-- == 0) return false;
5402        Handle<Object> value(fast_elements->get(i), isolate);
5403        if (value->IsJSObject()) {
5404          Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5405          if (!IsFastLiteral(value_object,
5406                             max_depth - 1,
5407                             max_properties)) {
5408            return false;
5409          }
5410        }
5411      }
5412    } else if (boilerplate->HasFastDoubleElements()) {
5413      if (elements->Size() > kMaxRegularHeapObjectSize) return false;
5414    } else {
5415      return false;
5416    }
5417  }
5418
5419  Handle<FixedArray> properties(boilerplate->properties());
5420  if (properties->length() > 0) {
5421    return false;
5422  } else {
5423    Handle<DescriptorArray> descriptors(
5424        boilerplate->map()->instance_descriptors());
5425    int limit = boilerplate->map()->NumberOfOwnDescriptors();
5426    for (int i = 0; i < limit; i++) {
5427      PropertyDetails details = descriptors->GetDetails(i);
5428      if (details.location() != kField) continue;
5429      DCHECK_EQ(kData, details.kind());
5430      if ((*max_properties)-- == 0) return false;
5431      FieldIndex field_index = FieldIndex::ForDescriptor(boilerplate->map(), i);
5432      if (boilerplate->IsUnboxedDoubleField(field_index)) continue;
5433      Handle<Object> value(boilerplate->RawFastPropertyAt(field_index),
5434                           isolate);
5435      if (value->IsJSObject()) {
5436        Handle<JSObject> value_object = Handle<JSObject>::cast(value);
5437        if (!IsFastLiteral(value_object,
5438                           max_depth - 1,
5439                           max_properties)) {
5440          return false;
5441        }
5442      }
5443    }
5444  }
5445  return true;
5446}
5447
5448
5449void HOptimizedGraphBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
5450  DCHECK(!HasStackOverflow());
5451  DCHECK(current_block() != NULL);
5452  DCHECK(current_block()->HasPredecessor());
5453
5454  Handle<JSFunction> closure = function_state()->compilation_info()->closure();
5455  HInstruction* literal;
5456
5457  // Check whether to use fast or slow deep-copying for boilerplate.
5458  int max_properties = kMaxFastLiteralProperties;
5459  Handle<Object> literals_cell(
5460      closure->feedback_vector()->Get(expr->literal_slot()), isolate());
5461  Handle<AllocationSite> site;
5462  Handle<JSObject> boilerplate;
5463  if (!literals_cell->IsUndefined(isolate())) {
5464    // Retrieve the boilerplate
5465    site = Handle<AllocationSite>::cast(literals_cell);
5466    boilerplate = Handle<JSObject>(JSObject::cast(site->transition_info()),
5467                                   isolate());
5468  }
5469
5470  if (!boilerplate.is_null() &&
5471      IsFastLiteral(boilerplate, kMaxFastLiteralDepth, &max_properties)) {
5472    AllocationSiteUsageContext site_context(isolate(), site, false);
5473    site_context.EnterNewScope();
5474    literal = BuildFastLiteral(boilerplate, &site_context);
5475    site_context.ExitScope(site, boilerplate);
5476  } else {
5477    NoObservableSideEffectsScope no_effects(this);
5478    Handle<BoilerplateDescription> constant_properties =
5479        expr->GetOrBuildConstantProperties(isolate());
5480    int literal_index = FeedbackVector::GetIndex(expr->literal_slot());
5481    int flags = expr->ComputeFlags(true);
5482
5483    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
5484                        Add<HConstant>(constant_properties),
5485                        Add<HConstant>(flags));
5486
5487    Runtime::FunctionId function_id = Runtime::kCreateObjectLiteral;
5488    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
5489  }
5490
5491  // The object is expected in the bailout environment during computation
5492  // of the property values and is the value of the entire expression.
5493  Push(literal);
5494  for (int i = 0; i < expr->properties()->length(); i++) {
5495    ObjectLiteral::Property* property = expr->properties()->at(i);
5496    if (property->is_computed_name()) return Bailout(kComputedPropertyName);
5497    if (property->IsCompileTimeValue()) continue;
5498
5499    Literal* key = property->key()->AsLiteral();
5500    Expression* value = property->value();
5501
5502    switch (property->kind()) {
5503      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
5504        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
5505        // Fall through.
5506      case ObjectLiteral::Property::COMPUTED:
5507        // It is safe to use [[Put]] here because the boilerplate already
5508        // contains computed properties with an uninitialized value.
5509        if (key->IsStringLiteral()) {
5510          DCHECK(key->IsPropertyName());
5511          if (property->emit_store()) {
5512            CHECK_ALIVE(VisitForValue(value));
5513            HValue* value = Pop();
5514
5515            Handle<Map> map = property->GetReceiverType();
5516            Handle<String> name = key->AsPropertyName();
5517            HValue* store;
5518            FeedbackSlot slot = property->GetSlot();
5519            if (map.is_null()) {
5520              // If we don't know the monomorphic type, do a generic store.
5521              CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot, literal,
5522                                                    name, value));
5523            } else {
5524              PropertyAccessInfo info(this, STORE, map, name);
5525              if (info.CanAccessMonomorphic()) {
5526                HValue* checked_literal = Add<HCheckMaps>(literal, map);
5527                DCHECK(!info.IsAccessorConstant());
5528                info.MarkAsInitializingStore();
5529                store = BuildMonomorphicAccess(
5530                    &info, literal, checked_literal, value,
5531                    BailoutId::None(), BailoutId::None());
5532                DCHECK_NOT_NULL(store);
5533              } else {
5534                CHECK_ALIVE(store = BuildNamedGeneric(STORE, NULL, slot,
5535                                                      literal, name, value));
5536              }
5537            }
5538            if (store->IsInstruction()) {
5539              AddInstruction(HInstruction::cast(store));
5540            }
5541            DCHECK(store->HasObservableSideEffects());
5542            Add<HSimulate>(key->id(), REMOVABLE_SIMULATE);
5543
5544            // Add [[HomeObject]] to function literals.
5545            if (FunctionLiteral::NeedsHomeObject(property->value())) {
5546              Handle<Symbol> sym = isolate()->factory()->home_object_symbol();
5547              HInstruction* store_home = BuildNamedGeneric(
5548                  STORE, NULL, property->GetSlot(1), value, sym, literal);
5549              AddInstruction(store_home);
5550              DCHECK(store_home->HasObservableSideEffects());
5551              Add<HSimulate>(property->value()->id(), REMOVABLE_SIMULATE);
5552            }
5553          } else {
5554            CHECK_ALIVE(VisitForEffect(value));
5555          }
5556          break;
5557        }
5558        // Fall through.
5559      case ObjectLiteral::Property::PROTOTYPE:
5560      case ObjectLiteral::Property::SETTER:
5561      case ObjectLiteral::Property::GETTER:
5562        return Bailout(kObjectLiteralWithComplexProperty);
5563      default: UNREACHABLE();
5564    }
5565  }
5566
5567  return ast_context()->ReturnValue(Pop());
5568}
5569
5570
5571void HOptimizedGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
5572  DCHECK(!HasStackOverflow());
5573  DCHECK(current_block() != NULL);
5574  DCHECK(current_block()->HasPredecessor());
5575  ZoneList<Expression*>* subexprs = expr->values();
5576  int length = subexprs->length();
5577  HInstruction* literal;
5578
5579  Handle<AllocationSite> site;
5580  Handle<FeedbackVector> vector(environment()->closure()->feedback_vector(),
5581                                isolate());
5582  Handle<Object> literals_cell(vector->Get(expr->literal_slot()), isolate());
5583  Handle<JSObject> boilerplate_object;
5584  if (!literals_cell->IsUndefined(isolate())) {
5585    DCHECK(literals_cell->IsAllocationSite());
5586    site = Handle<AllocationSite>::cast(literals_cell);
5587    boilerplate_object = Handle<JSObject>(
5588        JSObject::cast(site->transition_info()), isolate());
5589  }
5590
5591  // Check whether to use fast or slow deep-copying for boilerplate.
5592  int max_properties = kMaxFastLiteralProperties;
5593  if (!boilerplate_object.is_null() &&
5594      IsFastLiteral(boilerplate_object, kMaxFastLiteralDepth,
5595                    &max_properties)) {
5596    DCHECK(site->SitePointsToLiteral());
5597    AllocationSiteUsageContext site_context(isolate(), site, false);
5598    site_context.EnterNewScope();
5599    literal = BuildFastLiteral(boilerplate_object, &site_context);
5600    site_context.ExitScope(site, boilerplate_object);
5601  } else {
5602    NoObservableSideEffectsScope no_effects(this);
5603    Handle<ConstantElementsPair> constants =
5604        expr->GetOrBuildConstantElements(isolate());
5605    int literal_index = FeedbackVector::GetIndex(expr->literal_slot());
5606    int flags = expr->ComputeFlags(true);
5607
5608    Add<HPushArguments>(AddThisFunction(), Add<HConstant>(literal_index),
5609                        Add<HConstant>(constants), Add<HConstant>(flags));
5610
5611    Runtime::FunctionId function_id = Runtime::kCreateArrayLiteral;
5612    literal = Add<HCallRuntime>(Runtime::FunctionForId(function_id), 4);
5613
5614    // Register to deopt if the boilerplate ElementsKind changes.
5615    if (!site.is_null()) {
5616      top_info()->dependencies()->AssumeTransitionStable(site);
5617    }
5618  }
5619
5620  // The array is expected in the bailout environment during computation
5621  // of the property values and is the value of the entire expression.
5622  Push(literal);
5623
5624  HInstruction* elements = NULL;
5625
5626  for (int i = 0; i < length; i++) {
5627    Expression* subexpr = subexprs->at(i);
5628    DCHECK(!subexpr->IsSpread());
5629
5630    // If the subexpression is a literal or a simple materialized literal it
5631    // is already set in the cloned array.
5632    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
5633
5634    CHECK_ALIVE(VisitForValue(subexpr));
5635    HValue* value = Pop();
5636    if (!Smi::IsValid(i)) return Bailout(kNonSmiKeyInArrayLiteral);
5637
5638    elements = AddLoadElements(literal);
5639
5640    HValue* key = Add<HConstant>(i);
5641
5642    if (!boilerplate_object.is_null()) {
5643      ElementsKind boilerplate_elements_kind =
5644          boilerplate_object->GetElementsKind();
5645      switch (boilerplate_elements_kind) {
5646        case FAST_SMI_ELEMENTS:
5647        case FAST_HOLEY_SMI_ELEMENTS:
5648        case FAST_ELEMENTS:
5649        case FAST_HOLEY_ELEMENTS:
5650        case FAST_DOUBLE_ELEMENTS:
5651        case FAST_HOLEY_DOUBLE_ELEMENTS: {
5652          Add<HStoreKeyed>(elements, key, value, nullptr,
5653                           boilerplate_elements_kind);
5654          break;
5655        }
5656        default:
5657          UNREACHABLE();
5658          break;
5659      }
5660    } else {
5661      HInstruction* instr = BuildKeyedGeneric(
5662          STORE, expr, expr->LiteralFeedbackSlot(), literal, key, value);
5663      AddInstruction(instr);
5664    }
5665
5666    Add<HSimulate>(expr->GetIdForElement(i));
5667  }
5668
5669  return ast_context()->ReturnValue(Pop());
5670}
5671
5672
5673HCheckMaps* HOptimizedGraphBuilder::AddCheckMap(HValue* object,
5674                                                Handle<Map> map) {
5675  BuildCheckHeapObject(object);
5676  return Add<HCheckMaps>(object, map);
5677}
5678
5679
5680HInstruction* HOptimizedGraphBuilder::BuildLoadNamedField(
5681    PropertyAccessInfo* info,
5682    HValue* checked_object) {
5683  // Check if this is a load of an immutable or constant property.
5684  if (checked_object->ActualValue()->IsConstant()) {
5685    Handle<Object> object(
5686        HConstant::cast(checked_object->ActualValue())->handle(isolate()));
5687
5688    if (object->IsJSObject()) {
5689      LookupIterator it(object, info->name(),
5690                        LookupIterator::OWN_SKIP_INTERCEPTOR);
5691      if (it.IsFound()) {
5692        bool is_reaonly_non_configurable =
5693            it.IsReadOnly() && !it.IsConfigurable();
5694        if (is_reaonly_non_configurable ||
5695            (FLAG_track_constant_fields && info->IsDataConstantField())) {
5696          Handle<Object> value = JSReceiver::GetDataProperty(&it);
5697          if (!is_reaonly_non_configurable) {
5698            DCHECK(!it.is_dictionary_holder());
5699            // Add dependency on the map that introduced the field.
5700            Handle<Map> field_owner_map = it.GetFieldOwnerMap();
5701            top_info()->dependencies()->AssumeFieldOwner(field_owner_map);
5702          }
5703          return New<HConstant>(value);
5704        }
5705      }
5706    }
5707  }
5708
5709  HObjectAccess access = info->access();
5710  if (access.representation().IsDouble() &&
5711      (!FLAG_unbox_double_fields || !access.IsInobject())) {
5712    // Load the heap number.
5713    checked_object = Add<HLoadNamedField>(
5714        checked_object, nullptr,
5715        access.WithRepresentation(Representation::Tagged()));
5716    // Load the double value from it.
5717    access = HObjectAccess::ForHeapNumberValue();
5718  }
5719
5720  SmallMapList* map_list = info->field_maps();
5721  if (map_list->length() == 0) {
5722    return New<HLoadNamedField>(checked_object, checked_object, access);
5723  }
5724
5725  UniqueSet<Map>* maps = new(zone()) UniqueSet<Map>(map_list->length(), zone());
5726  for (int i = 0; i < map_list->length(); ++i) {
5727    maps->Add(Unique<Map>::CreateImmovable(map_list->at(i)), zone());
5728  }
5729  return New<HLoadNamedField>(
5730      checked_object, checked_object, access, maps, info->field_type());
5731}
5732
5733HValue* HOptimizedGraphBuilder::BuildStoreNamedField(PropertyAccessInfo* info,
5734                                                     HValue* checked_object,
5735                                                     HValue* value) {
5736  bool transition_to_field = info->IsTransition();
5737  // TODO(verwaest): Move this logic into PropertyAccessInfo.
5738  HObjectAccess field_access = info->access();
5739
5740  bool store_to_constant_field = FLAG_track_constant_fields &&
5741                                 info->StoreMode() != INITIALIZING_STORE &&
5742                                 info->IsDataConstantField();
5743
5744  HStoreNamedField *instr;
5745  if (field_access.representation().IsDouble() &&
5746      (!FLAG_unbox_double_fields || !field_access.IsInobject())) {
5747    HObjectAccess heap_number_access =
5748        field_access.WithRepresentation(Representation::Tagged());
5749    if (transition_to_field) {
5750      // The store requires a mutable HeapNumber to be allocated.
5751      NoObservableSideEffectsScope no_side_effects(this);
5752      HInstruction* heap_number_size = Add<HConstant>(HeapNumber::kSize);
5753
5754      // TODO(hpayer): Allocation site pretenuring support.
5755      HInstruction* heap_number =
5756          Add<HAllocate>(heap_number_size, HType::HeapObject(), NOT_TENURED,
5757                         MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
5758      AddStoreMapConstant(
5759          heap_number, isolate()->factory()->mutable_heap_number_map());
5760      Add<HStoreNamedField>(heap_number, HObjectAccess::ForHeapNumberValue(),
5761                            value);
5762      instr = New<HStoreNamedField>(checked_object->ActualValue(),
5763                                    heap_number_access,
5764                                    heap_number);
5765    } else {
5766      // Already holds a HeapNumber; load the box and write its value field.
5767      HInstruction* heap_number =
5768          Add<HLoadNamedField>(checked_object, nullptr, heap_number_access);
5769
5770      if (store_to_constant_field) {
5771        // If the field is constant check that the value we are going to store
5772        // matches current value.
5773        HInstruction* current_value = Add<HLoadNamedField>(
5774            heap_number, nullptr, HObjectAccess::ForHeapNumberValue());
5775        IfBuilder value_checker(this);
5776        value_checker.IfNot<HCompareNumericAndBranch>(current_value, value,
5777                                                      Token::EQ);
5778        value_checker.ThenDeopt(DeoptimizeReason::kValueMismatch);
5779        value_checker.End();
5780        return nullptr;
5781
5782      } else {
5783        instr = New<HStoreNamedField>(heap_number,
5784                                      HObjectAccess::ForHeapNumberValue(),
5785                                      value, STORE_TO_INITIALIZED_ENTRY);
5786      }
5787    }
5788  } else {
5789    if (store_to_constant_field) {
5790      // If the field is constant check that the value we are going to store
5791      // matches current value.
5792      HInstruction* current_value = Add<HLoadNamedField>(
5793          checked_object->ActualValue(), checked_object, field_access);
5794
5795      IfBuilder value_checker(this);
5796      if (field_access.representation().IsDouble()) {
5797        value_checker.IfNot<HCompareNumericAndBranch>(current_value, value,
5798                                                      Token::EQ);
5799      } else {
5800        value_checker.IfNot<HCompareObjectEqAndBranch>(current_value, value);
5801      }
5802      value_checker.ThenDeopt(DeoptimizeReason::kValueMismatch);
5803      value_checker.End();
5804      return nullptr;
5805
5806    } else {
5807      if (field_access.representation().IsHeapObject()) {
5808        BuildCheckHeapObject(value);
5809      }
5810
5811      if (!info->field_maps()->is_empty()) {
5812        DCHECK(field_access.representation().IsHeapObject());
5813        value = Add<HCheckMaps>(value, info->field_maps());
5814      }
5815
5816      // This is a normal store.
5817      instr = New<HStoreNamedField>(checked_object->ActualValue(), field_access,
5818                                    value, info->StoreMode());
5819    }
5820  }
5821
5822  if (transition_to_field) {
5823    Handle<Map> transition(info->transition());
5824    DCHECK(!transition->is_deprecated());
5825    instr->SetTransition(Add<HConstant>(transition));
5826  }
5827  return instr;
5828}
5829
5830Handle<FieldType>
5831HOptimizedGraphBuilder::PropertyAccessInfo::GetFieldTypeFromMap(
5832    Handle<Map> map) const {
5833  DCHECK(IsFound());
5834  DCHECK(number_ < map->NumberOfOwnDescriptors());
5835  return handle(map->instance_descriptors()->GetFieldType(number_), isolate());
5836}
5837
5838bool HOptimizedGraphBuilder::PropertyAccessInfo::IsCompatible(
5839    PropertyAccessInfo* info) {
5840  if (!CanInlinePropertyAccess(map_)) return false;
5841
5842  // Currently only handle AstType::Number as a polymorphic case.
5843  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
5844  // instruction.
5845  if (IsNumberType()) return false;
5846
5847  // Values are only compatible for monomorphic load if they all behave the same
5848  // regarding value wrappers.
5849  if (IsValueWrapped() != info->IsValueWrapped()) return false;
5850
5851  if (!LookupDescriptor()) return false;
5852
5853  if (!IsFound()) {
5854    return (!info->IsFound() || info->has_holder()) &&
5855           map()->prototype() == info->map()->prototype();
5856  }
5857
5858  // Mismatch if the other access info found the property in the prototype
5859  // chain.
5860  if (info->has_holder()) return false;
5861
5862  if (IsAccessorConstant()) {
5863    return accessor_.is_identical_to(info->accessor_) &&
5864        api_holder_.is_identical_to(info->api_holder_);
5865  }
5866
5867  if (IsDataConstant()) {
5868    return constant_.is_identical_to(info->constant_);
5869  }
5870
5871  DCHECK(IsData());
5872  if (!info->IsData()) return false;
5873
5874  Representation r = access_.representation();
5875  if (IsLoad()) {
5876    if (!info->access_.representation().IsCompatibleForLoad(r)) return false;
5877  } else {
5878    if (!info->access_.representation().IsCompatibleForStore(r)) return false;
5879  }
5880  if (info->access_.offset() != access_.offset()) return false;
5881  if (info->access_.IsInobject() != access_.IsInobject()) return false;
5882  if (IsLoad()) {
5883    if (field_maps_.is_empty()) {
5884      info->field_maps_.Clear();
5885    } else if (!info->field_maps_.is_empty()) {
5886      for (int i = 0; i < field_maps_.length(); ++i) {
5887        info->field_maps_.AddMapIfMissing(field_maps_.at(i), info->zone());
5888      }
5889      info->field_maps_.Sort();
5890    }
5891  } else {
5892    // We can only merge stores that agree on their field maps. The comparison
5893    // below is safe, since we keep the field maps sorted.
5894    if (field_maps_.length() != info->field_maps_.length()) return false;
5895    for (int i = 0; i < field_maps_.length(); ++i) {
5896      if (!field_maps_.at(i).is_identical_to(info->field_maps_.at(i))) {
5897        return false;
5898      }
5899    }
5900  }
5901  info->GeneralizeRepresentation(r);
5902  info->field_type_ = info->field_type_.Combine(field_type_);
5903  return true;
5904}
5905
5906
5907bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupDescriptor() {
5908  if (!map_->IsJSObjectMap()) return true;
5909  LookupDescriptor(*map_, *name_);
5910  return LoadResult(map_);
5911}
5912
5913
5914bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadResult(Handle<Map> map) {
5915  if (!IsLoad() && IsProperty() && IsReadOnly()) {
5916    return false;
5917  }
5918
5919  if (IsData()) {
5920    // Construct the object field access.
5921    int index = GetLocalFieldIndexFromMap(map);
5922    access_ = HObjectAccess::ForField(map, index, representation(), name_);
5923
5924    // Load field map for heap objects.
5925    return LoadFieldMaps(map);
5926  } else if (IsAccessorConstant()) {
5927    Handle<Object> accessors = GetAccessorsFromMap(map);
5928    if (!accessors->IsAccessorPair()) return false;
5929    Object* raw_accessor =
5930        IsLoad() ? Handle<AccessorPair>::cast(accessors)->getter()
5931                 : Handle<AccessorPair>::cast(accessors)->setter();
5932    if (!raw_accessor->IsJSFunction() &&
5933        !raw_accessor->IsFunctionTemplateInfo())
5934      return false;
5935    Handle<Object> accessor = handle(HeapObject::cast(raw_accessor));
5936    CallOptimization call_optimization(accessor);
5937    if (call_optimization.is_simple_api_call()) {
5938      CallOptimization::HolderLookup holder_lookup;
5939      api_holder_ =
5940          call_optimization.LookupHolderOfExpectedType(map_, &holder_lookup);
5941    }
5942    accessor_ = accessor;
5943  } else if (IsDataConstant()) {
5944    constant_ = GetConstantFromMap(map);
5945  }
5946
5947  return true;
5948}
5949
5950
5951bool HOptimizedGraphBuilder::PropertyAccessInfo::LoadFieldMaps(
5952    Handle<Map> map) {
5953  // Clear any previously collected field maps/type.
5954  field_maps_.Clear();
5955  field_type_ = HType::Tagged();
5956
5957  // Figure out the field type from the accessor map.
5958  Handle<FieldType> field_type = GetFieldTypeFromMap(map);
5959
5960  // Collect the (stable) maps from the field type.
5961  if (field_type->IsClass()) {
5962    DCHECK(access_.representation().IsHeapObject());
5963    Handle<Map> field_map = field_type->AsClass();
5964    if (field_map->is_stable()) {
5965      field_maps_.Add(field_map, zone());
5966    }
5967  }
5968
5969  if (field_maps_.is_empty()) {
5970    // Store is not safe if the field map was cleared.
5971    return IsLoad() || !field_type->IsNone();
5972  }
5973
5974  // Determine field HType from field type.
5975  field_type_ = HType::FromFieldType(field_type, zone());
5976  DCHECK(field_type_.IsHeapObject());
5977
5978  // Add dependency on the map that introduced the field.
5979  top_info()->dependencies()->AssumeFieldOwner(GetFieldOwnerFromMap(map));
5980  return true;
5981}
5982
5983
5984bool HOptimizedGraphBuilder::PropertyAccessInfo::LookupInPrototypes() {
5985  Handle<Map> map = this->map();
5986  if (name_->IsPrivate()) {
5987    NotFound();
5988    return !map->has_hidden_prototype();
5989  }
5990
5991  while (map->prototype()->IsJSObject()) {
5992    holder_ = handle(JSObject::cast(map->prototype()));
5993    if (holder_->map()->is_deprecated()) {
5994      JSObject::TryMigrateInstance(holder_);
5995    }
5996    map = Handle<Map>(holder_->map());
5997    if (!CanInlinePropertyAccess(map)) {
5998      NotFound();
5999      return false;
6000    }
6001    LookupDescriptor(*map, *name_);
6002    if (IsFound()) return LoadResult(map);
6003  }
6004
6005  NotFound();
6006  return !map->prototype()->IsJSReceiver();
6007}
6008
6009
6010bool HOptimizedGraphBuilder::PropertyAccessInfo::IsIntegerIndexedExotic() {
6011  InstanceType instance_type = map_->instance_type();
6012  return instance_type == JS_TYPED_ARRAY_TYPE && name_->IsString() &&
6013         IsSpecialIndex(isolate()->unicode_cache(), String::cast(*name_));
6014}
6015
6016
6017bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessMonomorphic() {
6018  if (!CanInlinePropertyAccess(map_)) return false;
6019  if (IsJSObjectFieldAccessor()) return IsLoad();
6020  if (map_->IsJSFunctionMap() && map_->is_constructor() &&
6021      !map_->has_non_instance_prototype() &&
6022      name_.is_identical_to(isolate()->factory()->prototype_string())) {
6023    return IsLoad();
6024  }
6025  if (!LookupDescriptor()) return false;
6026  if (IsFound()) return IsLoad() || !IsReadOnly();
6027  if (IsIntegerIndexedExotic()) return false;
6028  if (!LookupInPrototypes()) return false;
6029  if (IsLoad()) return true;
6030
6031  if (IsAccessorConstant()) return true;
6032  LookupTransition(*map_, *name_, NONE);
6033  if (IsTransitionToData() && map_->unused_property_fields() > 0) {
6034    // Construct the object field access.
6035    int descriptor = transition()->LastAdded();
6036    int index =
6037        transition()->instance_descriptors()->GetFieldIndex(descriptor) -
6038        map_->GetInObjectProperties();
6039    PropertyDetails details =
6040        transition()->instance_descriptors()->GetDetails(descriptor);
6041    Representation representation = details.representation();
6042    access_ = HObjectAccess::ForField(map_, index, representation, name_);
6043
6044    // Load field map for heap objects.
6045    return LoadFieldMaps(transition());
6046  }
6047  return false;
6048}
6049
6050
6051bool HOptimizedGraphBuilder::PropertyAccessInfo::CanAccessAsMonomorphic(
6052    SmallMapList* maps) {
6053  DCHECK(map_.is_identical_to(maps->first()));
6054  if (!CanAccessMonomorphic()) return false;
6055  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
6056  if (maps->length() > kMaxLoadPolymorphism) return false;
6057  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
6058  if (GetJSObjectFieldAccess(&access)) {
6059    for (int i = 1; i < maps->length(); ++i) {
6060      PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
6061      HObjectAccess test_access = HObjectAccess::ForMap();  // bogus default
6062      if (!test_info.GetJSObjectFieldAccess(&test_access)) return false;
6063      if (!access.Equals(test_access)) return false;
6064    }
6065    return true;
6066  }
6067
6068  // Currently only handle numbers as a polymorphic case.
6069  // TODO(verwaest): Support monomorphic handling of numbers with a HCheckNumber
6070  // instruction.
6071  if (IsNumberType()) return false;
6072
6073  // Multiple maps cannot transition to the same target map.
6074  DCHECK(!IsLoad() || !IsTransition());
6075  if (IsTransition() && maps->length() > 1) return false;
6076
6077  for (int i = 1; i < maps->length(); ++i) {
6078    PropertyAccessInfo test_info(builder_, access_type_, maps->at(i), name_);
6079    if (!test_info.IsCompatible(this)) return false;
6080  }
6081
6082  return true;
6083}
6084
6085
6086Handle<Map> HOptimizedGraphBuilder::PropertyAccessInfo::map() {
6087  Handle<JSFunction> ctor;
6088  if (Map::GetConstructorFunction(
6089          map_, handle(current_info()->closure()->context()->native_context()))
6090          .ToHandle(&ctor)) {
6091    return handle(ctor->initial_map());
6092  }
6093  return map_;
6094}
6095
6096
6097static bool NeedsWrapping(Handle<Map> map, Handle<JSFunction> target) {
6098  return !map->IsJSObjectMap() &&
6099         is_sloppy(target->shared()->language_mode()) &&
6100         !target->shared()->native();
6101}
6102
6103
6104bool HOptimizedGraphBuilder::PropertyAccessInfo::NeedsWrappingFor(
6105    Handle<JSFunction> target) const {
6106  return NeedsWrapping(map_, target);
6107}
6108
6109
6110HValue* HOptimizedGraphBuilder::BuildMonomorphicAccess(
6111    PropertyAccessInfo* info, HValue* object, HValue* checked_object,
6112    HValue* value, BailoutId ast_id, BailoutId return_id,
6113    bool can_inline_accessor) {
6114  HObjectAccess access = HObjectAccess::ForMap();  // bogus default
6115  if (info->GetJSObjectFieldAccess(&access)) {
6116    DCHECK(info->IsLoad());
6117    return New<HLoadNamedField>(object, checked_object, access);
6118  }
6119
6120  if (info->name().is_identical_to(isolate()->factory()->prototype_string()) &&
6121      info->map()->IsJSFunctionMap() && info->map()->is_constructor()) {
6122    DCHECK(!info->map()->has_non_instance_prototype());
6123    return New<HLoadFunctionPrototype>(checked_object);
6124  }
6125
6126  HValue* checked_holder = checked_object;
6127  if (info->has_holder()) {
6128    Handle<JSObject> prototype(JSObject::cast(info->map()->prototype()));
6129    checked_holder = BuildCheckPrototypeMaps(prototype, info->holder());
6130  }
6131
6132  if (!info->IsFound()) {
6133    DCHECK(info->IsLoad());
6134    return graph()->GetConstantUndefined();
6135  }
6136
6137  if (info->IsData()) {
6138    if (info->IsLoad()) {
6139      return BuildLoadNamedField(info, checked_holder);
6140    } else {
6141      return BuildStoreNamedField(info, checked_object, value);
6142    }
6143  }
6144
6145  if (info->IsTransition()) {
6146    DCHECK(!info->IsLoad());
6147    return BuildStoreNamedField(info, checked_object, value);
6148  }
6149
6150  if (info->IsAccessorConstant()) {
6151    MaybeHandle<Name> maybe_name =
6152        FunctionTemplateInfo::TryGetCachedPropertyName(isolate(),
6153                                                       info->accessor());
6154    if (!maybe_name.is_null()) {
6155      Handle<Name> name = maybe_name.ToHandleChecked();
6156      PropertyAccessInfo cache_info(this, LOAD, info->map(), name);
6157      // Load new target.
6158      if (cache_info.CanAccessMonomorphic()) {
6159        return BuildLoadNamedField(&cache_info, checked_object);
6160      }
6161    }
6162
6163    Push(checked_object);
6164    int argument_count = 1;
6165    if (!info->IsLoad()) {
6166      argument_count = 2;
6167      Push(value);
6168    }
6169
6170    if (info->accessor()->IsJSFunction() &&
6171        info->NeedsWrappingFor(Handle<JSFunction>::cast(info->accessor()))) {
6172      HValue* function = Add<HConstant>(info->accessor());
6173      PushArgumentsFromEnvironment(argument_count);
6174      return NewCallFunction(function, argument_count, TailCallMode::kDisallow,
6175                             ConvertReceiverMode::kNotNullOrUndefined,
6176                             TailCallMode::kDisallow);
6177    } else if (FLAG_inline_accessors && can_inline_accessor) {
6178      bool success = info->IsLoad()
6179          ? TryInlineGetter(info->accessor(), info->map(), ast_id, return_id)
6180          : TryInlineSetter(
6181              info->accessor(), info->map(), ast_id, return_id, value);
6182      if (success || HasStackOverflow()) return NULL;
6183    }
6184
6185    PushArgumentsFromEnvironment(argument_count);
6186    if (!info->accessor()->IsJSFunction()) {
6187      Bailout(kInliningBailedOut);
6188      return nullptr;
6189    }
6190    return NewCallConstantFunction(Handle<JSFunction>::cast(info->accessor()),
6191                                   argument_count, TailCallMode::kDisallow,
6192                                   TailCallMode::kDisallow);
6193  }
6194
6195  DCHECK(info->IsDataConstant());
6196  if (info->IsLoad()) {
6197    return New<HConstant>(info->constant());
6198  } else {
6199    return New<HCheckValue>(value, Handle<JSFunction>::cast(info->constant()));
6200  }
6201}
6202
6203void HOptimizedGraphBuilder::HandlePolymorphicNamedFieldAccess(
6204    PropertyAccessType access_type, Expression* expr, FeedbackSlot slot,
6205    BailoutId ast_id, BailoutId return_id, HValue* object, HValue* value,
6206    SmallMapList* maps, Handle<Name> name) {
6207  // Something did not match; must use a polymorphic load.
6208  int count = 0;
6209  HBasicBlock* join = NULL;
6210  HBasicBlock* number_block = NULL;
6211  bool handled_string = false;
6212
6213  bool handle_smi = false;
6214  STATIC_ASSERT(kMaxLoadPolymorphism == kMaxStorePolymorphism);
6215  int i;
6216  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
6217    PropertyAccessInfo info(this, access_type, maps->at(i), name);
6218    if (info.IsStringType()) {
6219      if (handled_string) continue;
6220      handled_string = true;
6221    }
6222    if (info.CanAccessMonomorphic()) {
6223      count++;
6224      if (info.IsNumberType()) {
6225        handle_smi = true;
6226        break;
6227      }
6228    }
6229  }
6230
6231  if (i < maps->length()) {
6232    count = -1;
6233    maps->Clear();
6234  } else {
6235    count = 0;
6236  }
6237  HControlInstruction* smi_check = NULL;
6238  handled_string = false;
6239
6240  for (i = 0; i < maps->length() && count < kMaxLoadPolymorphism; ++i) {
6241    PropertyAccessInfo info(this, access_type, maps->at(i), name);
6242    if (info.IsStringType()) {
6243      if (handled_string) continue;
6244      handled_string = true;
6245    }
6246    if (!info.CanAccessMonomorphic()) continue;
6247
6248    if (count == 0) {
6249      join = graph()->CreateBasicBlock();
6250      if (handle_smi) {
6251        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
6252        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
6253        number_block = graph()->CreateBasicBlock();
6254        smi_check = New<HIsSmiAndBranch>(
6255            object, empty_smi_block, not_smi_block);
6256        FinishCurrentBlock(smi_check);
6257        GotoNoSimulate(empty_smi_block, number_block);
6258        set_current_block(not_smi_block);
6259      } else {
6260        BuildCheckHeapObject(object);
6261      }
6262    }
6263    ++count;
6264    HBasicBlock* if_true = graph()->CreateBasicBlock();
6265    HBasicBlock* if_false = graph()->CreateBasicBlock();
6266    HUnaryControlInstruction* compare;
6267
6268    HValue* dependency;
6269    if (info.IsNumberType()) {
6270      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
6271      compare = New<HCompareMap>(object, heap_number_map, if_true, if_false);
6272      dependency = smi_check;
6273    } else if (info.IsStringType()) {
6274      compare = New<HIsStringAndBranch>(object, if_true, if_false);
6275      dependency = compare;
6276    } else {
6277      compare = New<HCompareMap>(object, info.map(), if_true, if_false);
6278      dependency = compare;
6279    }
6280    FinishCurrentBlock(compare);
6281
6282    if (info.IsNumberType()) {
6283      GotoNoSimulate(if_true, number_block);
6284      if_true = number_block;
6285    }
6286
6287    set_current_block(if_true);
6288
6289    HValue* access =
6290        BuildMonomorphicAccess(&info, object, dependency, value, ast_id,
6291                               return_id, FLAG_polymorphic_inlining);
6292
6293    HValue* result = NULL;
6294    switch (access_type) {
6295      case LOAD:
6296        result = access;
6297        break;
6298      case STORE:
6299        result = value;
6300        break;
6301    }
6302
6303    if (access == NULL) {
6304      if (HasStackOverflow()) return;
6305    } else {
6306      if (access->IsInstruction()) {
6307        HInstruction* instr = HInstruction::cast(access);
6308        if (!instr->IsLinked()) AddInstruction(instr);
6309      }
6310      if (!ast_context()->IsEffect()) Push(result);
6311    }
6312
6313    if (current_block() != NULL) Goto(join);
6314    set_current_block(if_false);
6315  }
6316
6317  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
6318  // know about and do not want to handle ones we've never seen.  Otherwise
6319  // use a generic IC.
6320  if (count == maps->length() && FLAG_deoptimize_uncommon_cases) {
6321    FinishExitWithHardDeoptimization(
6322        DeoptimizeReason::kUnknownMapInPolymorphicAccess);
6323  } else {
6324    HInstruction* instr =
6325        BuildNamedGeneric(access_type, expr, slot, object, name, value);
6326    AddInstruction(instr);
6327    if (!ast_context()->IsEffect()) Push(access_type == LOAD ? instr : value);
6328
6329    if (join != NULL) {
6330      Goto(join);
6331    } else {
6332      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6333      if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6334      return;
6335    }
6336  }
6337
6338  DCHECK(join != NULL);
6339  if (join->HasPredecessor()) {
6340    join->SetJoinId(ast_id);
6341    set_current_block(join);
6342    if (!ast_context()->IsEffect()) ast_context()->ReturnValue(Pop());
6343  } else {
6344    set_current_block(NULL);
6345  }
6346}
6347
6348static bool ComputeReceiverTypes(Expression* expr, HValue* receiver,
6349                                 SmallMapList** t,
6350                                 HOptimizedGraphBuilder* builder) {
6351  Zone* zone = builder->zone();
6352  SmallMapList* maps = expr->GetReceiverTypes();
6353  *t = maps;
6354  bool monomorphic = expr->IsMonomorphic();
6355  if (maps != nullptr && receiver->HasMonomorphicJSObjectType()) {
6356    if (maps->length() > 0) {
6357      Map* root_map = receiver->GetMonomorphicJSObjectMap()->FindRootMap();
6358      maps->FilterForPossibleTransitions(root_map);
6359      monomorphic = maps->length() == 1;
6360    } else {
6361      // No type feedback, see if we can infer the type. This is safely
6362      // possible if the receiver had a known map at some point, and no
6363      // map-changing stores have happened to it since.
6364      Handle<Map> candidate_map = receiver->GetMonomorphicJSObjectMap();
6365      for (HInstruction* current = builder->current_block()->last();
6366           current != nullptr; current = current->previous()) {
6367        if (current->IsBlockEntry()) break;
6368        if (current->CheckChangesFlag(kMaps)) {
6369          // Only allow map changes that store the candidate map. We don't
6370          // need to care which object the map is being written into.
6371          if (!current->IsStoreNamedField()) break;
6372          HStoreNamedField* map_change = HStoreNamedField::cast(current);
6373          if (!map_change->value()->IsConstant()) break;
6374          HConstant* map_constant = HConstant::cast(map_change->value());
6375          if (!map_constant->representation().IsTagged()) break;
6376          Handle<Object> map = map_constant->handle(builder->isolate());
6377          if (!map.is_identical_to(candidate_map)) break;
6378        }
6379        if (current == receiver) {
6380          // We made it all the way back to the receiver without encountering
6381          // a map change! So we can assume that the receiver still has the
6382          // candidate_map we know about.
6383          maps->Add(candidate_map, zone);
6384          monomorphic = true;
6385          break;
6386        }
6387      }
6388    }
6389  }
6390  return monomorphic && CanInlinePropertyAccess(maps->first());
6391}
6392
6393
6394static bool AreStringTypes(SmallMapList* maps) {
6395  for (int i = 0; i < maps->length(); i++) {
6396    if (maps->at(i)->instance_type() >= FIRST_NONSTRING_TYPE) return false;
6397  }
6398  return true;
6399}
6400
6401void HOptimizedGraphBuilder::BuildStore(Expression* expr, Property* prop,
6402                                        FeedbackSlot slot, BailoutId ast_id,
6403                                        BailoutId return_id,
6404                                        bool is_uninitialized) {
6405  if (!prop->key()->IsPropertyName()) {
6406    // Keyed store.
6407    HValue* value = Pop();
6408    HValue* key = Pop();
6409    HValue* object = Pop();
6410    bool has_side_effects = false;
6411    HValue* result =
6412        HandleKeyedElementAccess(object, key, value, expr, slot, ast_id,
6413                                 return_id, STORE, &has_side_effects);
6414    if (has_side_effects) {
6415      if (!ast_context()->IsEffect()) Push(value);
6416      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6417      if (!ast_context()->IsEffect()) Drop(1);
6418    }
6419    if (result == NULL) return;
6420    return ast_context()->ReturnValue(value);
6421  }
6422
6423  // Named store.
6424  HValue* value = Pop();
6425  HValue* object = Pop();
6426
6427  Literal* key = prop->key()->AsLiteral();
6428  Handle<String> name = Handle<String>::cast(key->value());
6429  DCHECK(!name.is_null());
6430
6431  HValue* access = BuildNamedAccess(STORE, ast_id, return_id, expr, slot,
6432                                    object, name, value, is_uninitialized);
6433  if (access == NULL) return;
6434
6435  if (!ast_context()->IsEffect()) Push(value);
6436  if (access->IsInstruction()) AddInstruction(HInstruction::cast(access));
6437  if (access->HasObservableSideEffects()) {
6438    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6439  }
6440  if (!ast_context()->IsEffect()) Drop(1);
6441  return ast_context()->ReturnValue(value);
6442}
6443
6444
6445void HOptimizedGraphBuilder::HandlePropertyAssignment(Assignment* expr) {
6446  Property* prop = expr->target()->AsProperty();
6447  DCHECK(prop != NULL);
6448  CHECK_ALIVE(VisitForValue(prop->obj()));
6449  if (!prop->key()->IsPropertyName()) {
6450    CHECK_ALIVE(VisitForValue(prop->key()));
6451  }
6452  CHECK_ALIVE(VisitForValue(expr->value()));
6453  BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
6454             expr->AssignmentId(), expr->IsUninitialized());
6455}
6456
6457HInstruction* HOptimizedGraphBuilder::InlineGlobalPropertyStore(
6458    LookupIterator* it, HValue* value, BailoutId ast_id) {
6459  Handle<PropertyCell> cell = it->GetPropertyCell();
6460  top_info()->dependencies()->AssumePropertyCell(cell);
6461  auto cell_type = it->property_details().cell_type();
6462  if (cell_type == PropertyCellType::kConstant ||
6463      cell_type == PropertyCellType::kUndefined) {
6464    Handle<Object> constant(cell->value(), isolate());
6465    if (value->IsConstant()) {
6466      HConstant* c_value = HConstant::cast(value);
6467      if (!constant.is_identical_to(c_value->handle(isolate()))) {
6468        Add<HDeoptimize>(DeoptimizeReason::kConstantGlobalVariableAssignment,
6469                         Deoptimizer::EAGER);
6470      }
6471    } else {
6472      HValue* c_constant = Add<HConstant>(constant);
6473      IfBuilder builder(this);
6474      if (constant->IsNumber()) {
6475        builder.If<HCompareNumericAndBranch>(value, c_constant, Token::EQ);
6476      } else {
6477        builder.If<HCompareObjectEqAndBranch>(value, c_constant);
6478      }
6479      builder.Then();
6480      builder.Else();
6481      Add<HDeoptimize>(DeoptimizeReason::kConstantGlobalVariableAssignment,
6482                       Deoptimizer::EAGER);
6483      builder.End();
6484    }
6485  }
6486  HConstant* cell_constant = Add<HConstant>(cell);
6487  auto access = HObjectAccess::ForPropertyCellValue();
6488  if (cell_type == PropertyCellType::kConstantType) {
6489    switch (cell->GetConstantType()) {
6490      case PropertyCellConstantType::kSmi:
6491        access = access.WithRepresentation(Representation::Smi());
6492        break;
6493      case PropertyCellConstantType::kStableMap: {
6494        // First check that the previous value of the {cell} still has the
6495        // map that we are about to check the new {value} for. If not, then
6496        // the stable map assumption was invalidated and we cannot continue
6497        // with the optimized code.
6498        Handle<HeapObject> cell_value(HeapObject::cast(cell->value()));
6499        Handle<Map> cell_value_map(cell_value->map());
6500        if (!cell_value_map->is_stable()) {
6501          Bailout(kUnstableConstantTypeHeapObject);
6502          return nullptr;
6503        }
6504        top_info()->dependencies()->AssumeMapStable(cell_value_map);
6505        // Now check that the new {value} is a HeapObject with the same map
6506        Add<HCheckHeapObject>(value);
6507        value = Add<HCheckMaps>(value, cell_value_map);
6508        access = access.WithRepresentation(Representation::HeapObject());
6509        break;
6510      }
6511    }
6512  }
6513  HInstruction* instr = New<HStoreNamedField>(cell_constant, access, value);
6514  instr->ClearChangesFlag(kInobjectFields);
6515  instr->SetChangesFlag(kGlobalVars);
6516  return instr;
6517}
6518
6519// Because not every expression has a position and there is not common
6520// superclass of Assignment and CountOperation, we cannot just pass the
6521// owning expression instead of position and ast_id separately.
6522void HOptimizedGraphBuilder::HandleGlobalVariableAssignment(Variable* var,
6523                                                            HValue* value,
6524                                                            FeedbackSlot slot,
6525                                                            BailoutId ast_id) {
6526  Handle<JSGlobalObject> global(current_info()->global_object());
6527
6528  // Lookup in script contexts.
6529  {
6530    Handle<ScriptContextTable> script_contexts(
6531        global->native_context()->script_context_table());
6532    ScriptContextTable::LookupResult lookup;
6533    if (ScriptContextTable::Lookup(script_contexts, var->name(), &lookup)) {
6534      if (lookup.mode == CONST) {
6535        return Bailout(kNonInitializerAssignmentToConst);
6536      }
6537      Handle<Context> script_context =
6538          ScriptContextTable::GetContext(script_contexts, lookup.context_index);
6539
6540      Handle<Object> current_value =
6541          FixedArray::get(*script_context, lookup.slot_index, isolate());
6542
6543      // If the values is not the hole, it will stay initialized,
6544      // so no need to generate a check.
6545      if (current_value->IsTheHole(isolate())) {
6546        return Bailout(kReferenceToUninitializedVariable);
6547      }
6548
6549      HStoreNamedField* instr = Add<HStoreNamedField>(
6550          Add<HConstant>(script_context),
6551          HObjectAccess::ForContextSlot(lookup.slot_index), value);
6552      USE(instr);
6553      DCHECK(instr->HasObservableSideEffects());
6554      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6555      return;
6556    }
6557  }
6558
6559  LookupIterator it(global, var->name(), LookupIterator::OWN);
6560  if (CanInlineGlobalPropertyAccess(var, &it, STORE)) {
6561    HInstruction* instr = InlineGlobalPropertyStore(&it, value, ast_id);
6562    if (!instr) return;
6563    AddInstruction(instr);
6564    if (instr->HasObservableSideEffects()) {
6565      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6566    }
6567  } else {
6568    HValue* global_object = Add<HLoadNamedField>(
6569        BuildGetNativeContext(), nullptr,
6570        HObjectAccess::ForContextSlot(Context::EXTENSION_INDEX));
6571    Handle<FeedbackVector> vector =
6572        handle(current_feedback_vector(), isolate());
6573    HValue* name = Add<HConstant>(var->name());
6574    HValue* vector_value = Add<HConstant>(vector);
6575    HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
6576    DCHECK_EQ(vector->GetLanguageMode(slot), function_language_mode());
6577    Callable callable = CodeFactory::StoreICInOptimizedCode(
6578        isolate(), function_language_mode());
6579    HValue* stub = Add<HConstant>(callable.code());
6580    HValue* values[] = {global_object, name, value, slot_value, vector_value};
6581    HCallWithDescriptor* instr = Add<HCallWithDescriptor>(
6582        Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
6583    USE(instr);
6584    DCHECK(instr->HasObservableSideEffects());
6585    Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
6586  }
6587}
6588
6589
6590void HOptimizedGraphBuilder::HandleCompoundAssignment(Assignment* expr) {
6591  Expression* target = expr->target();
6592  VariableProxy* proxy = target->AsVariableProxy();
6593  Property* prop = target->AsProperty();
6594  DCHECK(proxy == NULL || prop == NULL);
6595
6596  // We have a second position recorded in the FullCodeGenerator to have
6597  // type feedback for the binary operation.
6598  BinaryOperation* operation = expr->binary_operation();
6599
6600  if (proxy != NULL) {
6601    Variable* var = proxy->var();
6602    if (var->mode() == LET)  {
6603      return Bailout(kUnsupportedLetCompoundAssignment);
6604    }
6605
6606    CHECK_ALIVE(VisitForValue(operation));
6607
6608    switch (var->location()) {
6609      case VariableLocation::UNALLOCATED:
6610        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
6611                                       expr->AssignmentId());
6612        break;
6613
6614      case VariableLocation::PARAMETER:
6615      case VariableLocation::LOCAL:
6616        if (var->mode() == CONST) {
6617          return Bailout(kNonInitializerAssignmentToConst);
6618        }
6619        BindIfLive(var, Top());
6620        break;
6621
6622      case VariableLocation::CONTEXT: {
6623        // Bail out if we try to mutate a parameter value in a function
6624        // using the arguments object.  We do not (yet) correctly handle the
6625        // arguments property of the function.
6626        if (current_info()->scope()->arguments() != NULL) {
6627          // Parameters will be allocated to context slots.  We have no
6628          // direct way to detect that the variable is a parameter so we do
6629          // a linear search of the parameter variables.
6630          int count = current_info()->scope()->num_parameters();
6631          for (int i = 0; i < count; ++i) {
6632            if (var == current_info()->scope()->parameter(i)) {
6633              Bailout(kAssignmentToParameterFunctionUsesArgumentsObject);
6634            }
6635          }
6636        }
6637
6638        HStoreContextSlot::Mode mode;
6639
6640        switch (var->mode()) {
6641          case LET:
6642            mode = HStoreContextSlot::kCheckDeoptimize;
6643            break;
6644          case CONST:
6645            if (var->throw_on_const_assignment(function_language_mode())) {
6646              return Bailout(kNonInitializerAssignmentToConst);
6647            } else {
6648              return ast_context()->ReturnValue(Pop());
6649            }
6650          default:
6651            mode = HStoreContextSlot::kNoCheck;
6652        }
6653
6654        HValue* context = BuildContextChainWalk(var);
6655        HStoreContextSlot* instr = Add<HStoreContextSlot>(
6656            context, var->index(), mode, Top());
6657        if (instr->HasObservableSideEffects()) {
6658          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6659        }
6660        break;
6661      }
6662
6663      case VariableLocation::LOOKUP:
6664        return Bailout(kCompoundAssignmentToLookupSlot);
6665
6666      case VariableLocation::MODULE:
6667        UNREACHABLE();
6668    }
6669    return ast_context()->ReturnValue(Pop());
6670
6671  } else if (prop != NULL) {
6672    CHECK_ALIVE(VisitForValue(prop->obj()));
6673    HValue* object = Top();
6674    HValue* key = NULL;
6675    if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
6676      CHECK_ALIVE(VisitForValue(prop->key()));
6677      key = Top();
6678    }
6679
6680    CHECK_ALIVE(PushLoad(prop, object, key));
6681
6682    CHECK_ALIVE(VisitForValue(expr->value()));
6683    HValue* right = Pop();
6684    HValue* left = Pop();
6685
6686    Push(BuildBinaryOperation(operation, left, right, PUSH_BEFORE_SIMULATE));
6687
6688    BuildStore(expr, prop, expr->AssignmentSlot(), expr->id(),
6689               expr->AssignmentId(), expr->IsUninitialized());
6690  } else {
6691    return Bailout(kInvalidLhsInCompoundAssignment);
6692  }
6693}
6694
6695
6696void HOptimizedGraphBuilder::VisitAssignment(Assignment* expr) {
6697  DCHECK(!HasStackOverflow());
6698  DCHECK(current_block() != NULL);
6699  DCHECK(current_block()->HasPredecessor());
6700
6701  VariableProxy* proxy = expr->target()->AsVariableProxy();
6702  Property* prop = expr->target()->AsProperty();
6703  DCHECK(proxy == NULL || prop == NULL);
6704
6705  if (expr->is_compound()) {
6706    HandleCompoundAssignment(expr);
6707    return;
6708  }
6709
6710  if (prop != NULL) {
6711    HandlePropertyAssignment(expr);
6712  } else if (proxy != NULL) {
6713    Variable* var = proxy->var();
6714
6715    if (var->mode() == CONST) {
6716      if (expr->op() != Token::INIT) {
6717        if (var->throw_on_const_assignment(function_language_mode())) {
6718          return Bailout(kNonInitializerAssignmentToConst);
6719        } else {
6720          CHECK_ALIVE(VisitForValue(expr->value()));
6721          return ast_context()->ReturnValue(Pop());
6722        }
6723      }
6724    }
6725
6726    // Handle the assignment.
6727    switch (var->location()) {
6728      case VariableLocation::UNALLOCATED:
6729        CHECK_ALIVE(VisitForValue(expr->value()));
6730        HandleGlobalVariableAssignment(var, Top(), expr->AssignmentSlot(),
6731                                       expr->AssignmentId());
6732        return ast_context()->ReturnValue(Pop());
6733
6734      case VariableLocation::PARAMETER:
6735      case VariableLocation::LOCAL: {
6736        // Perform an initialization check for let declared variables
6737        // or parameters.
6738        if (var->mode() == LET && expr->op() == Token::ASSIGN) {
6739          HValue* env_value = environment()->Lookup(var);
6740          if (env_value == graph()->GetConstantHole()) {
6741            return Bailout(kAssignmentToLetVariableBeforeInitialization);
6742          }
6743        }
6744        // We do not allow the arguments object to occur in a context where it
6745        // may escape, but assignments to stack-allocated locals are
6746        // permitted.
6747        CHECK_ALIVE(VisitForValue(expr->value(), ARGUMENTS_ALLOWED));
6748        HValue* value = Pop();
6749        BindIfLive(var, value);
6750        return ast_context()->ReturnValue(value);
6751      }
6752
6753      case VariableLocation::CONTEXT: {
6754        // Bail out if we try to mutate a parameter value in a function using
6755        // the arguments object.  We do not (yet) correctly handle the
6756        // arguments property of the function.
6757        if (current_info()->scope()->arguments() != NULL) {
6758          // Parameters will rewrite to context slots.  We have no direct way
6759          // to detect that the variable is a parameter.
6760          int count = current_info()->scope()->num_parameters();
6761          for (int i = 0; i < count; ++i) {
6762            if (var == current_info()->scope()->parameter(i)) {
6763              return Bailout(kAssignmentToParameterInArgumentsObject);
6764            }
6765          }
6766        }
6767
6768        CHECK_ALIVE(VisitForValue(expr->value()));
6769        HStoreContextSlot::Mode mode;
6770        if (expr->op() == Token::ASSIGN) {
6771          switch (var->mode()) {
6772            case LET:
6773              mode = HStoreContextSlot::kCheckDeoptimize;
6774              break;
6775            case CONST:
6776              // If we reached this point, the only possibility
6777              // is a sloppy assignment to a function name.
6778              DCHECK(function_language_mode() == SLOPPY &&
6779                     !var->throw_on_const_assignment(SLOPPY));
6780              return ast_context()->ReturnValue(Pop());
6781            default:
6782              mode = HStoreContextSlot::kNoCheck;
6783          }
6784        } else {
6785          DCHECK_EQ(Token::INIT, expr->op());
6786          mode = HStoreContextSlot::kNoCheck;
6787        }
6788
6789        HValue* context = BuildContextChainWalk(var);
6790        HStoreContextSlot* instr = Add<HStoreContextSlot>(
6791            context, var->index(), mode, Top());
6792        if (instr->HasObservableSideEffects()) {
6793          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
6794        }
6795        return ast_context()->ReturnValue(Pop());
6796      }
6797
6798      case VariableLocation::LOOKUP:
6799        return Bailout(kAssignmentToLOOKUPVariable);
6800
6801      case VariableLocation::MODULE:
6802        UNREACHABLE();
6803    }
6804  } else {
6805    return Bailout(kInvalidLeftHandSideInAssignment);
6806  }
6807}
6808
6809
6810void HOptimizedGraphBuilder::VisitYield(Yield* expr) {
6811  // Generators are not optimized, so we should never get here.
6812  UNREACHABLE();
6813}
6814
6815
6816void HOptimizedGraphBuilder::VisitThrow(Throw* expr) {
6817  DCHECK(!HasStackOverflow());
6818  DCHECK(current_block() != NULL);
6819  DCHECK(current_block()->HasPredecessor());
6820  if (!ast_context()->IsEffect()) {
6821    // The parser turns invalid left-hand sides in assignments into throw
6822    // statements, which may not be in effect contexts. We might still try
6823    // to optimize such functions; bail out now if we do.
6824    return Bailout(kInvalidLeftHandSideInAssignment);
6825  }
6826  CHECK_ALIVE(VisitForValue(expr->exception()));
6827
6828  HValue* value = environment()->Pop();
6829  if (!is_tracking_positions()) SetSourcePosition(expr->position());
6830  Add<HPushArguments>(value);
6831  Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kThrow), 1);
6832  Add<HSimulate>(expr->id());
6833
6834  // If the throw definitely exits the function, we can finish with a dummy
6835  // control flow at this point.  This is not the case if the throw is inside
6836  // an inlined function which may be replaced.
6837  if (call_context() == NULL) {
6838    FinishExitCurrentBlock(New<HAbnormalExit>());
6839  }
6840}
6841
6842
6843HInstruction* HGraphBuilder::AddLoadStringInstanceType(HValue* string) {
6844  if (string->IsConstant()) {
6845    HConstant* c_string = HConstant::cast(string);
6846    if (c_string->HasStringValue()) {
6847      return Add<HConstant>(c_string->StringValue()->map()->instance_type());
6848    }
6849  }
6850  return Add<HLoadNamedField>(
6851      Add<HLoadNamedField>(string, nullptr, HObjectAccess::ForMap()), nullptr,
6852      HObjectAccess::ForMapInstanceType());
6853}
6854
6855
6856HInstruction* HGraphBuilder::AddLoadStringLength(HValue* string) {
6857  return AddInstruction(BuildLoadStringLength(string));
6858}
6859
6860
6861HInstruction* HGraphBuilder::BuildLoadStringLength(HValue* string) {
6862  if (string->IsConstant()) {
6863    HConstant* c_string = HConstant::cast(string);
6864    if (c_string->HasStringValue()) {
6865      return New<HConstant>(c_string->StringValue()->length());
6866    }
6867  }
6868  return New<HLoadNamedField>(string, nullptr,
6869                              HObjectAccess::ForStringLength());
6870}
6871
6872HInstruction* HOptimizedGraphBuilder::BuildNamedGeneric(
6873    PropertyAccessType access_type, Expression* expr, FeedbackSlot slot,
6874    HValue* object, Handle<Name> name, HValue* value, bool is_uninitialized) {
6875  if (is_uninitialized) {
6876    Add<HDeoptimize>(
6877        DeoptimizeReason::kInsufficientTypeFeedbackForGenericNamedAccess,
6878        Deoptimizer::SOFT);
6879  }
6880  Handle<FeedbackVector> vector(current_feedback_vector(), isolate());
6881
6882  HValue* key = Add<HConstant>(name);
6883  HValue* vector_value = Add<HConstant>(vector);
6884  HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
6885
6886  if (access_type == LOAD) {
6887    HValue* values[] = {object, key, slot_value, vector_value};
6888    if (!expr->AsProperty()->key()->IsPropertyName()) {
6889      DCHECK(vector->IsKeyedLoadIC(slot));
6890      // It's possible that a keyed load of a constant string was converted
6891      // to a named load. Here, at the last minute, we need to make sure to
6892      // use a generic Keyed Load if we are using the type vector, because
6893      // it has to share information with full code.
6894      Callable callable = CodeFactory::KeyedLoadICInOptimizedCode(isolate());
6895      HValue* stub = Add<HConstant>(callable.code());
6896      HCallWithDescriptor* result =
6897          New<HCallWithDescriptor>(Code::KEYED_LOAD_IC, stub, 0,
6898                                   callable.descriptor(), ArrayVector(values));
6899      return result;
6900    }
6901    DCHECK(vector->IsLoadIC(slot));
6902    Callable callable = CodeFactory::LoadICInOptimizedCode(isolate());
6903    HValue* stub = Add<HConstant>(callable.code());
6904    HCallWithDescriptor* result = New<HCallWithDescriptor>(
6905        Code::LOAD_IC, stub, 0, callable.descriptor(), ArrayVector(values));
6906    return result;
6907
6908  } else {
6909    HValue* values[] = {object, key, value, slot_value, vector_value};
6910    if (vector->IsKeyedStoreIC(slot)) {
6911      // It's possible that a keyed store of a constant string was converted
6912      // to a named store. Here, at the last minute, we need to make sure to
6913      // use a generic Keyed Store if we are using the type vector, because
6914      // it has to share information with full code.
6915      DCHECK_EQ(vector->GetLanguageMode(slot), function_language_mode());
6916      Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
6917          isolate(), function_language_mode());
6918      HValue* stub = Add<HConstant>(callable.code());
6919      HCallWithDescriptor* result =
6920          New<HCallWithDescriptor>(Code::KEYED_STORE_IC, stub, 0,
6921                                   callable.descriptor(), ArrayVector(values));
6922      return result;
6923    }
6924    HCallWithDescriptor* result;
6925    if (vector->IsStoreOwnIC(slot)) {
6926      Callable callable = CodeFactory::StoreOwnICInOptimizedCode(isolate());
6927      HValue* stub = Add<HConstant>(callable.code());
6928      result = New<HCallWithDescriptor>(
6929          Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
6930    } else {
6931      DCHECK(vector->IsStoreIC(slot));
6932      DCHECK_EQ(vector->GetLanguageMode(slot), function_language_mode());
6933      Callable callable = CodeFactory::StoreICInOptimizedCode(
6934          isolate(), function_language_mode());
6935      HValue* stub = Add<HConstant>(callable.code());
6936      result = New<HCallWithDescriptor>(
6937          Code::STORE_IC, stub, 0, callable.descriptor(), ArrayVector(values));
6938    }
6939    return result;
6940  }
6941}
6942
6943HInstruction* HOptimizedGraphBuilder::BuildKeyedGeneric(
6944    PropertyAccessType access_type, Expression* expr, FeedbackSlot slot,
6945    HValue* object, HValue* key, HValue* value) {
6946  Handle<FeedbackVector> vector(current_feedback_vector(), isolate());
6947  HValue* vector_value = Add<HConstant>(vector);
6948  HValue* slot_value = Add<HConstant>(vector->GetIndex(slot));
6949
6950  if (access_type == LOAD) {
6951    HValue* values[] = {object, key, slot_value, vector_value};
6952
6953    Callable callable = CodeFactory::KeyedLoadICInOptimizedCode(isolate());
6954    HValue* stub = Add<HConstant>(callable.code());
6955    HCallWithDescriptor* result =
6956        New<HCallWithDescriptor>(Code::KEYED_LOAD_IC, stub, 0,
6957                                 callable.descriptor(), ArrayVector(values));
6958    return result;
6959  } else {
6960    HValue* values[] = {object, key, value, slot_value, vector_value};
6961
6962    Callable callable = CodeFactory::KeyedStoreICInOptimizedCode(
6963        isolate(), function_language_mode());
6964    HValue* stub = Add<HConstant>(callable.code());
6965    HCallWithDescriptor* result =
6966        New<HCallWithDescriptor>(Code::KEYED_STORE_IC, stub, 0,
6967                                 callable.descriptor(), ArrayVector(values));
6968    return result;
6969  }
6970}
6971
6972
6973LoadKeyedHoleMode HOptimizedGraphBuilder::BuildKeyedHoleMode(Handle<Map> map) {
6974  // Loads from a "stock" fast holey double arrays can elide the hole check.
6975  // Loads from a "stock" fast holey array can convert the hole to undefined
6976  // with impunity.
6977  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
6978  bool holey_double_elements =
6979      *map == isolate()->get_initial_js_array_map(FAST_HOLEY_DOUBLE_ELEMENTS);
6980  bool holey_elements =
6981      *map == isolate()->get_initial_js_array_map(FAST_HOLEY_ELEMENTS);
6982  if ((holey_double_elements || holey_elements) &&
6983      isolate()->IsFastArrayConstructorPrototypeChainIntact()) {
6984    load_mode =
6985        holey_double_elements ? ALLOW_RETURN_HOLE : CONVERT_HOLE_TO_UNDEFINED;
6986
6987    Handle<JSObject> prototype(JSObject::cast(map->prototype()), isolate());
6988    Handle<JSObject> object_prototype = isolate()->initial_object_prototype();
6989    BuildCheckPrototypeMaps(prototype, object_prototype);
6990    graph()->MarkDependsOnEmptyArrayProtoElements();
6991  }
6992  return load_mode;
6993}
6994
6995
6996HInstruction* HOptimizedGraphBuilder::BuildMonomorphicElementAccess(
6997    HValue* object,
6998    HValue* key,
6999    HValue* val,
7000    HValue* dependency,
7001    Handle<Map> map,
7002    PropertyAccessType access_type,
7003    KeyedAccessStoreMode store_mode) {
7004  HCheckMaps* checked_object = Add<HCheckMaps>(object, map, dependency);
7005
7006  if (access_type == STORE && map->prototype()->IsJSObject()) {
7007    // monomorphic stores need a prototype chain check because shape
7008    // changes could allow callbacks on elements in the chain that
7009    // aren't compatible with monomorphic keyed stores.
7010    PrototypeIterator iter(map);
7011    JSObject* holder = NULL;
7012    while (!iter.IsAtEnd()) {
7013      // JSProxies can't occur here because we wouldn't have installed a
7014      // non-generic IC if there were any.
7015      holder = *PrototypeIterator::GetCurrent<JSObject>(iter);
7016      iter.Advance();
7017    }
7018    DCHECK(holder && holder->IsJSObject());
7019
7020    BuildCheckPrototypeMaps(handle(JSObject::cast(map->prototype())),
7021                            Handle<JSObject>(holder));
7022  }
7023
7024  LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
7025  return BuildUncheckedMonomorphicElementAccess(
7026      checked_object, key, val,
7027      map->instance_type() == JS_ARRAY_TYPE,
7028      map->elements_kind(), access_type,
7029      load_mode, store_mode);
7030}
7031
7032
7033static bool CanInlineElementAccess(Handle<Map> map) {
7034  return map->IsJSObjectMap() &&
7035         (map->has_fast_elements() || map->has_fixed_typed_array_elements()) &&
7036         !map->has_indexed_interceptor() && !map->is_access_check_needed();
7037}
7038
7039
7040HInstruction* HOptimizedGraphBuilder::TryBuildConsolidatedElementLoad(
7041    HValue* object,
7042    HValue* key,
7043    HValue* val,
7044    SmallMapList* maps) {
7045  // For polymorphic loads of similar elements kinds (i.e. all tagged or all
7046  // double), always use the "worst case" code without a transition.  This is
7047  // much faster than transitioning the elements to the worst case, trading a
7048  // HTransitionElements for a HCheckMaps, and avoiding mutation of the array.
7049  bool has_double_maps = false;
7050  bool has_smi_or_object_maps = false;
7051  bool has_js_array_access = false;
7052  bool has_non_js_array_access = false;
7053  bool has_seen_holey_elements = false;
7054  Handle<Map> most_general_consolidated_map;
7055  for (int i = 0; i < maps->length(); ++i) {
7056    Handle<Map> map = maps->at(i);
7057    if (!CanInlineElementAccess(map)) return NULL;
7058    // Don't allow mixing of JSArrays with JSObjects.
7059    if (map->instance_type() == JS_ARRAY_TYPE) {
7060      if (has_non_js_array_access) return NULL;
7061      has_js_array_access = true;
7062    } else if (has_js_array_access) {
7063      return NULL;
7064    } else {
7065      has_non_js_array_access = true;
7066    }
7067    // Don't allow mixed, incompatible elements kinds.
7068    if (map->has_fast_double_elements()) {
7069      if (has_smi_or_object_maps) return NULL;
7070      has_double_maps = true;
7071    } else if (map->has_fast_smi_or_object_elements()) {
7072      if (has_double_maps) return NULL;
7073      has_smi_or_object_maps = true;
7074    } else {
7075      return NULL;
7076    }
7077    // Remember if we've ever seen holey elements.
7078    if (IsHoleyElementsKind(map->elements_kind())) {
7079      has_seen_holey_elements = true;
7080    }
7081    // Remember the most general elements kind, the code for its load will
7082    // properly handle all of the more specific cases.
7083    if ((i == 0) || IsMoreGeneralElementsKindTransition(
7084            most_general_consolidated_map->elements_kind(),
7085            map->elements_kind())) {
7086      most_general_consolidated_map = map;
7087    }
7088  }
7089  if (!has_double_maps && !has_smi_or_object_maps) return NULL;
7090
7091  HCheckMaps* checked_object = Add<HCheckMaps>(object, maps);
7092  // FAST_ELEMENTS is considered more general than FAST_HOLEY_SMI_ELEMENTS.
7093  // If we've seen both, the consolidated load must use FAST_HOLEY_ELEMENTS.
7094  ElementsKind consolidated_elements_kind = has_seen_holey_elements
7095      ? GetHoleyElementsKind(most_general_consolidated_map->elements_kind())
7096      : most_general_consolidated_map->elements_kind();
7097  LoadKeyedHoleMode load_mode = NEVER_RETURN_HOLE;
7098  if (has_seen_holey_elements) {
7099    // Make sure that all of the maps we are handling have the initial array
7100    // prototype.
7101    bool saw_non_array_prototype = false;
7102    for (int i = 0; i < maps->length(); ++i) {
7103      Handle<Map> map = maps->at(i);
7104      if (map->prototype() != *isolate()->initial_array_prototype()) {
7105        // We can't guarantee that loading the hole is safe. The prototype may
7106        // have an element at this position.
7107        saw_non_array_prototype = true;
7108        break;
7109      }
7110    }
7111
7112    if (!saw_non_array_prototype) {
7113      Handle<Map> holey_map = handle(
7114          isolate()->get_initial_js_array_map(consolidated_elements_kind));
7115      load_mode = BuildKeyedHoleMode(holey_map);
7116      if (load_mode != NEVER_RETURN_HOLE) {
7117        for (int i = 0; i < maps->length(); ++i) {
7118          Handle<Map> map = maps->at(i);
7119          // The prototype check was already done for the holey map in
7120          // BuildKeyedHoleMode.
7121          if (!map.is_identical_to(holey_map)) {
7122            Handle<JSObject> prototype(JSObject::cast(map->prototype()),
7123                                       isolate());
7124            Handle<JSObject> object_prototype =
7125                isolate()->initial_object_prototype();
7126            BuildCheckPrototypeMaps(prototype, object_prototype);
7127          }
7128        }
7129      }
7130    }
7131  }
7132  HInstruction* instr = BuildUncheckedMonomorphicElementAccess(
7133      checked_object, key, val,
7134      most_general_consolidated_map->instance_type() == JS_ARRAY_TYPE,
7135      consolidated_elements_kind, LOAD, load_mode, STANDARD_STORE);
7136  return instr;
7137}
7138
7139HValue* HOptimizedGraphBuilder::HandlePolymorphicElementAccess(
7140    Expression* expr, FeedbackSlot slot, HValue* object, HValue* key,
7141    HValue* val, SmallMapList* maps, PropertyAccessType access_type,
7142    KeyedAccessStoreMode store_mode, bool* has_side_effects) {
7143  *has_side_effects = false;
7144  BuildCheckHeapObject(object);
7145
7146  if (access_type == LOAD) {
7147    HInstruction* consolidated_load =
7148        TryBuildConsolidatedElementLoad(object, key, val, maps);
7149    if (consolidated_load != NULL) {
7150      *has_side_effects |= consolidated_load->HasObservableSideEffects();
7151      return consolidated_load;
7152    }
7153  }
7154
7155  // Elements_kind transition support.
7156  MapHandleList transition_target(maps->length());
7157  // Collect possible transition targets.
7158  MapHandleList possible_transitioned_maps(maps->length());
7159  for (int i = 0; i < maps->length(); ++i) {
7160    Handle<Map> map = maps->at(i);
7161    // Loads from strings or loads with a mix of string and non-string maps
7162    // shouldn't be handled polymorphically.
7163    DCHECK(access_type != LOAD || !map->IsStringMap());
7164    ElementsKind elements_kind = map->elements_kind();
7165    if (CanInlineElementAccess(map) && IsFastElementsKind(elements_kind) &&
7166        elements_kind != GetInitialFastElementsKind()) {
7167      possible_transitioned_maps.Add(map);
7168    }
7169    if (IsSloppyArgumentsElements(elements_kind)) {
7170      HInstruction* result =
7171          BuildKeyedGeneric(access_type, expr, slot, object, key, val);
7172      *has_side_effects = result->HasObservableSideEffects();
7173      return AddInstruction(result);
7174    }
7175  }
7176  // Get transition target for each map (NULL == no transition).
7177  for (int i = 0; i < maps->length(); ++i) {
7178    Handle<Map> map = maps->at(i);
7179    Map* transitioned_map =
7180        map->FindElementsKindTransitionedMap(&possible_transitioned_maps);
7181    if (transitioned_map != nullptr) {
7182      transition_target.Add(handle(transitioned_map));
7183    } else {
7184      transition_target.Add(Handle<Map>());
7185    }
7186  }
7187
7188  MapHandleList untransitionable_maps(maps->length());
7189  HTransitionElementsKind* transition = NULL;
7190  for (int i = 0; i < maps->length(); ++i) {
7191    Handle<Map> map = maps->at(i);
7192    DCHECK(map->IsMap());
7193    if (!transition_target.at(i).is_null()) {
7194      DCHECK(Map::IsValidElementsTransition(
7195          map->elements_kind(),
7196          transition_target.at(i)->elements_kind()));
7197      transition = Add<HTransitionElementsKind>(object, map,
7198                                                transition_target.at(i));
7199    } else {
7200      untransitionable_maps.Add(map);
7201    }
7202  }
7203
7204  // If only one map is left after transitioning, handle this case
7205  // monomorphically.
7206  DCHECK(untransitionable_maps.length() >= 1);
7207  if (untransitionable_maps.length() == 1) {
7208    Handle<Map> untransitionable_map = untransitionable_maps[0];
7209    HInstruction* instr = NULL;
7210    if (!CanInlineElementAccess(untransitionable_map)) {
7211      instr = AddInstruction(
7212          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
7213    } else {
7214      instr = BuildMonomorphicElementAccess(
7215          object, key, val, transition, untransitionable_map, access_type,
7216          store_mode);
7217    }
7218    *has_side_effects |= instr->HasObservableSideEffects();
7219    return access_type == STORE ? val : instr;
7220  }
7221
7222  HBasicBlock* join = graph()->CreateBasicBlock();
7223
7224  for (int i = 0; i < untransitionable_maps.length(); ++i) {
7225    Handle<Map> map = untransitionable_maps[i];
7226    ElementsKind elements_kind = map->elements_kind();
7227    HBasicBlock* this_map = graph()->CreateBasicBlock();
7228    HBasicBlock* other_map = graph()->CreateBasicBlock();
7229    HCompareMap* mapcompare =
7230        New<HCompareMap>(object, map, this_map, other_map);
7231    FinishCurrentBlock(mapcompare);
7232
7233    set_current_block(this_map);
7234    HInstruction* access = NULL;
7235    if (!CanInlineElementAccess(map)) {
7236      access = AddInstruction(
7237          BuildKeyedGeneric(access_type, expr, slot, object, key, val));
7238    } else {
7239      DCHECK(IsFastElementsKind(elements_kind) ||
7240             IsFixedTypedArrayElementsKind(elements_kind));
7241      LoadKeyedHoleMode load_mode = BuildKeyedHoleMode(map);
7242      // Happily, mapcompare is a checked object.
7243      access = BuildUncheckedMonomorphicElementAccess(
7244          mapcompare, key, val,
7245          map->instance_type() == JS_ARRAY_TYPE,
7246          elements_kind, access_type,
7247          load_mode,
7248          store_mode);
7249    }
7250    *has_side_effects |= access->HasObservableSideEffects();
7251    // The caller will use has_side_effects and add a correct Simulate.
7252    access->SetFlag(HValue::kHasNoObservableSideEffects);
7253    if (access_type == LOAD) {
7254      Push(access);
7255    }
7256    NoObservableSideEffectsScope scope(this);
7257    GotoNoSimulate(join);
7258    set_current_block(other_map);
7259  }
7260
7261  // Ensure that we visited at least one map above that goes to join. This is
7262  // necessary because FinishExitWithHardDeoptimization does an AbnormalExit
7263  // rather than joining the join block. If this becomes an issue, insert a
7264  // generic access in the case length() == 0.
7265  DCHECK(join->predecessors()->length() > 0);
7266  // Deopt if none of the cases matched.
7267  NoObservableSideEffectsScope scope(this);
7268  FinishExitWithHardDeoptimization(
7269      DeoptimizeReason::kUnknownMapInPolymorphicElementAccess);
7270  set_current_block(join);
7271  return access_type == STORE ? val : Pop();
7272}
7273
7274HValue* HOptimizedGraphBuilder::HandleKeyedElementAccess(
7275    HValue* obj, HValue* key, HValue* val, Expression* expr, FeedbackSlot slot,
7276    BailoutId ast_id, BailoutId return_id, PropertyAccessType access_type,
7277    bool* has_side_effects) {
7278  // A keyed name access with type feedback may contain the name.
7279  Handle<FeedbackVector> vector = handle(current_feedback_vector(), isolate());
7280  HValue* expected_key = key;
7281  if (!key->ActualValue()->IsConstant()) {
7282    Name* name = nullptr;
7283    if (access_type == LOAD) {
7284      KeyedLoadICNexus nexus(vector, slot);
7285      name = nexus.FindFirstName();
7286    } else {
7287      KeyedStoreICNexus nexus(vector, slot);
7288      name = nexus.FindFirstName();
7289    }
7290    if (name != nullptr) {
7291      Handle<Name> handle_name(name);
7292      expected_key = Add<HConstant>(handle_name);
7293      // We need a check against the key.
7294      bool in_new_space = isolate()->heap()->InNewSpace(*handle_name);
7295      Unique<Name> unique_name = Unique<Name>::CreateUninitialized(handle_name);
7296      Add<HCheckValue>(key, unique_name, in_new_space);
7297    }
7298  }
7299  if (expected_key->ActualValue()->IsConstant()) {
7300    Handle<Object> constant =
7301        HConstant::cast(expected_key->ActualValue())->handle(isolate());
7302    uint32_t array_index;
7303    if ((constant->IsString() &&
7304         !Handle<String>::cast(constant)->AsArrayIndex(&array_index)) ||
7305        constant->IsSymbol()) {
7306      if (!constant->IsUniqueName()) {
7307        constant = isolate()->factory()->InternalizeString(
7308            Handle<String>::cast(constant));
7309      }
7310      HValue* access =
7311          BuildNamedAccess(access_type, ast_id, return_id, expr, slot, obj,
7312                           Handle<Name>::cast(constant), val, false);
7313      if (access == NULL || access->IsPhi() ||
7314          HInstruction::cast(access)->IsLinked()) {
7315        *has_side_effects = false;
7316      } else {
7317        HInstruction* instr = HInstruction::cast(access);
7318        AddInstruction(instr);
7319        *has_side_effects = instr->HasObservableSideEffects();
7320      }
7321      return access;
7322    }
7323  }
7324
7325  DCHECK(!expr->IsPropertyName());
7326  HInstruction* instr = NULL;
7327
7328  SmallMapList* maps;
7329  bool monomorphic = ComputeReceiverTypes(expr, obj, &maps, this);
7330
7331  bool force_generic = false;
7332  if (expr->GetKeyType() == PROPERTY) {
7333    // Non-Generic accesses assume that elements are being accessed, and will
7334    // deopt for non-index keys, which the IC knows will occur.
7335    // TODO(jkummerow): Consider adding proper support for property accesses.
7336    force_generic = true;
7337    monomorphic = false;
7338  } else if (access_type == STORE &&
7339             (monomorphic || (maps != NULL && !maps->is_empty()))) {
7340    // Stores can't be mono/polymorphic if their prototype chain has dictionary
7341    // elements. However a receiver map that has dictionary elements itself
7342    // should be left to normal mono/poly behavior (the other maps may benefit
7343    // from highly optimized stores).
7344    for (int i = 0; i < maps->length(); i++) {
7345      Handle<Map> current_map = maps->at(i);
7346      if (current_map->DictionaryElementsInPrototypeChainOnly()) {
7347        force_generic = true;
7348        monomorphic = false;
7349        break;
7350      }
7351    }
7352  } else if (access_type == LOAD && !monomorphic &&
7353             (maps != NULL && !maps->is_empty())) {
7354    // Polymorphic loads have to go generic if any of the maps are strings.
7355    // If some, but not all of the maps are strings, we should go generic
7356    // because polymorphic access wants to key on ElementsKind and isn't
7357    // compatible with strings.
7358    for (int i = 0; i < maps->length(); i++) {
7359      Handle<Map> current_map = maps->at(i);
7360      if (current_map->IsStringMap()) {
7361        force_generic = true;
7362        break;
7363      }
7364    }
7365  }
7366
7367  if (monomorphic) {
7368    Handle<Map> map = maps->first();
7369    if (!CanInlineElementAccess(map)) {
7370      instr = AddInstruction(
7371          BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
7372    } else {
7373      BuildCheckHeapObject(obj);
7374      instr = BuildMonomorphicElementAccess(
7375          obj, key, val, NULL, map, access_type, expr->GetStoreMode());
7376    }
7377  } else if (!force_generic && (maps != NULL && !maps->is_empty())) {
7378    return HandlePolymorphicElementAccess(expr, slot, obj, key, val, maps,
7379                                          access_type, expr->GetStoreMode(),
7380                                          has_side_effects);
7381  } else {
7382    if (access_type == STORE) {
7383      if (expr->IsAssignment() &&
7384          expr->AsAssignment()->HasNoTypeInformation()) {
7385        Add<HDeoptimize>(
7386            DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess,
7387            Deoptimizer::SOFT);
7388      }
7389    } else {
7390      if (expr->AsProperty()->HasNoTypeInformation()) {
7391        Add<HDeoptimize>(
7392            DeoptimizeReason::kInsufficientTypeFeedbackForGenericKeyedAccess,
7393            Deoptimizer::SOFT);
7394      }
7395    }
7396    instr = AddInstruction(
7397        BuildKeyedGeneric(access_type, expr, slot, obj, key, val));
7398  }
7399  *has_side_effects = instr->HasObservableSideEffects();
7400  return instr;
7401}
7402
7403
7404void HOptimizedGraphBuilder::EnsureArgumentsArePushedForAccess() {
7405  // Outermost function already has arguments on the stack.
7406  if (function_state()->outer() == NULL) return;
7407
7408  if (function_state()->arguments_pushed()) return;
7409
7410  // Push arguments when entering inlined function.
7411  HEnterInlined* entry = function_state()->entry();
7412  entry->set_arguments_pushed();
7413
7414  HArgumentsObject* arguments = entry->arguments_object();
7415  const ZoneList<HValue*>* arguments_values = arguments->arguments_values();
7416
7417  HInstruction* insert_after = entry;
7418  for (int i = 0; i < arguments_values->length(); i++) {
7419    HValue* argument = arguments_values->at(i);
7420    HInstruction* push_argument = New<HPushArguments>(argument);
7421    push_argument->InsertAfter(insert_after);
7422    insert_after = push_argument;
7423  }
7424
7425  HArgumentsElements* arguments_elements = New<HArgumentsElements>(true);
7426  arguments_elements->ClearFlag(HValue::kUseGVN);
7427  arguments_elements->InsertAfter(insert_after);
7428  function_state()->set_arguments_elements(arguments_elements);
7429}
7430
7431bool HOptimizedGraphBuilder::IsAnyParameterContextAllocated() {
7432  int count = current_info()->scope()->num_parameters();
7433  for (int i = 0; i < count; ++i) {
7434    if (current_info()->scope()->parameter(i)->location() ==
7435        VariableLocation::CONTEXT) {
7436      return true;
7437    }
7438  }
7439  return false;
7440}
7441
7442bool HOptimizedGraphBuilder::TryArgumentsAccess(Property* expr) {
7443  VariableProxy* proxy = expr->obj()->AsVariableProxy();
7444  if (proxy == NULL) return false;
7445  if (!proxy->var()->IsStackAllocated()) return false;
7446  if (!environment()->Lookup(proxy->var())->CheckFlag(HValue::kIsArguments)) {
7447    return false;
7448  }
7449
7450  HInstruction* result = NULL;
7451  if (expr->key()->IsPropertyName()) {
7452    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7453    if (!String::Equals(name, isolate()->factory()->length_string())) {
7454      return false;
7455    }
7456
7457    // Make sure we visit the arguments object so that the liveness analysis
7458    // still records the access.
7459    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->obj(), ARGUMENTS_ALLOWED), true);
7460    Drop(1);
7461
7462    if (function_state()->outer() == NULL) {
7463      HInstruction* elements = Add<HArgumentsElements>(false);
7464      result = New<HArgumentsLength>(elements);
7465    } else {
7466      // Number of arguments without receiver.
7467      int argument_count = environment()->
7468          arguments_environment()->parameter_count() - 1;
7469      result = New<HConstant>(argument_count);
7470    }
7471  } else {
7472    // We need to take into account the KEYED_LOAD_IC feedback to guard the
7473    // HBoundsCheck instructions below.
7474    if (!expr->IsMonomorphic() && !expr->IsUninitialized()) return false;
7475    if (IsAnyParameterContextAllocated()) return false;
7476    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->obj(), ARGUMENTS_ALLOWED), true);
7477    CHECK_ALIVE_OR_RETURN(VisitForValue(expr->key()), true);
7478    HValue* key = Pop();
7479    Drop(1);  // Arguments object.
7480    if (function_state()->outer() == NULL) {
7481      HInstruction* elements = Add<HArgumentsElements>(false);
7482      HInstruction* length = Add<HArgumentsLength>(elements);
7483      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7484      result = New<HAccessArgumentsAt>(elements, length, checked_key);
7485    } else {
7486      EnsureArgumentsArePushedForAccess();
7487
7488      // Number of arguments without receiver.
7489      HInstruction* elements = function_state()->arguments_elements();
7490      int argument_count = environment()->
7491          arguments_environment()->parameter_count() - 1;
7492      HInstruction* length = Add<HConstant>(argument_count);
7493      HInstruction* checked_key = Add<HBoundsCheck>(key, length);
7494      result = New<HAccessArgumentsAt>(elements, length, checked_key);
7495    }
7496  }
7497  ast_context()->ReturnInstruction(result, expr->id());
7498  return true;
7499}
7500
7501HValue* HOptimizedGraphBuilder::BuildNamedAccess(
7502    PropertyAccessType access, BailoutId ast_id, BailoutId return_id,
7503    Expression* expr, FeedbackSlot slot, HValue* object, Handle<Name> name,
7504    HValue* value, bool is_uninitialized) {
7505  SmallMapList* maps;
7506  ComputeReceiverTypes(expr, object, &maps, this);
7507  DCHECK(maps != NULL);
7508
7509  // Check for special case: Access via a single map to the global proxy
7510  // can also be handled monomorphically.
7511  if (maps->length() > 0) {
7512    Handle<Object> map_constructor =
7513        handle(maps->first()->GetConstructor(), isolate());
7514    if (map_constructor->IsJSFunction()) {
7515      Handle<Context> map_context =
7516          handle(Handle<JSFunction>::cast(map_constructor)->context());
7517      Handle<Context> current_context(current_info()->context());
7518      bool is_same_context_global_proxy_access =
7519          maps->length() == 1 &&  // >1 map => fallback to polymorphic
7520          maps->first()->IsJSGlobalProxyMap() &&
7521          (*map_context == *current_context);
7522      if (is_same_context_global_proxy_access) {
7523        Handle<JSGlobalObject> global_object(current_info()->global_object());
7524        LookupIterator it(global_object, name, LookupIterator::OWN);
7525        if (CanInlineGlobalPropertyAccess(&it, access)) {
7526          BuildCheckHeapObject(object);
7527          Add<HCheckMaps>(object, maps);
7528          if (access == LOAD) {
7529            InlineGlobalPropertyLoad(&it, expr->id());
7530            return nullptr;
7531          } else {
7532            return InlineGlobalPropertyStore(&it, value, expr->id());
7533          }
7534        }
7535      }
7536    }
7537
7538    PropertyAccessInfo info(this, access, maps->first(), name);
7539    if (!info.CanAccessAsMonomorphic(maps)) {
7540      HandlePolymorphicNamedFieldAccess(access, expr, slot, ast_id, return_id,
7541                                        object, value, maps, name);
7542      return NULL;
7543    }
7544
7545    HValue* checked_object;
7546    // AstType::Number() is only supported by polymorphic load/call handling.
7547    DCHECK(!info.IsNumberType());
7548    BuildCheckHeapObject(object);
7549    if (AreStringTypes(maps)) {
7550      checked_object =
7551          Add<HCheckInstanceType>(object, HCheckInstanceType::IS_STRING);
7552    } else {
7553      checked_object = Add<HCheckMaps>(object, maps);
7554    }
7555    return BuildMonomorphicAccess(
7556        &info, object, checked_object, value, ast_id, return_id);
7557  }
7558
7559  return BuildNamedGeneric(access, expr, slot, object, name, value,
7560                           is_uninitialized);
7561}
7562
7563
7564void HOptimizedGraphBuilder::PushLoad(Property* expr,
7565                                      HValue* object,
7566                                      HValue* key) {
7567  ValueContext for_value(this, ARGUMENTS_NOT_ALLOWED);
7568  Push(object);
7569  if (key != NULL) Push(key);
7570  BuildLoad(expr, expr->LoadId());
7571}
7572
7573
7574void HOptimizedGraphBuilder::BuildLoad(Property* expr,
7575                                       BailoutId ast_id) {
7576  HInstruction* instr = NULL;
7577  if (expr->IsStringAccess() && expr->GetKeyType() == ELEMENT) {
7578    HValue* index = Pop();
7579    HValue* string = Pop();
7580    HInstruction* char_code = BuildStringCharCodeAt(string, index);
7581    AddInstruction(char_code);
7582    if (char_code->IsConstant()) {
7583      HConstant* c_code = HConstant::cast(char_code);
7584      if (c_code->HasNumberValue() && std::isnan(c_code->DoubleValue())) {
7585        Add<HDeoptimize>(DeoptimizeReason::kOutOfBounds, Deoptimizer::EAGER);
7586      }
7587    }
7588    instr = NewUncasted<HStringCharFromCode>(char_code);
7589
7590  } else if (expr->key()->IsPropertyName()) {
7591    Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
7592    HValue* object = Pop();
7593
7594    HValue* value = BuildNamedAccess(LOAD, ast_id, expr->LoadId(), expr,
7595                                     expr->PropertyFeedbackSlot(), object, name,
7596                                     NULL, expr->IsUninitialized());
7597    if (value == NULL) return;
7598    if (value->IsPhi()) return ast_context()->ReturnValue(value);
7599    instr = HInstruction::cast(value);
7600    if (instr->IsLinked()) return ast_context()->ReturnValue(instr);
7601
7602  } else {
7603    HValue* key = Pop();
7604    HValue* obj = Pop();
7605
7606    bool has_side_effects = false;
7607    HValue* load = HandleKeyedElementAccess(
7608        obj, key, NULL, expr, expr->PropertyFeedbackSlot(), ast_id,
7609        expr->LoadId(), LOAD, &has_side_effects);
7610    if (has_side_effects) {
7611      if (ast_context()->IsEffect()) {
7612        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7613      } else {
7614        Push(load);
7615        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
7616        Drop(1);
7617      }
7618    }
7619    if (load == NULL) return;
7620    return ast_context()->ReturnValue(load);
7621  }
7622  return ast_context()->ReturnInstruction(instr, ast_id);
7623}
7624
7625
7626void HOptimizedGraphBuilder::VisitProperty(Property* expr) {
7627  DCHECK(!HasStackOverflow());
7628  DCHECK(current_block() != NULL);
7629  DCHECK(current_block()->HasPredecessor());
7630
7631  if (TryArgumentsAccess(expr)) return;
7632
7633  CHECK_ALIVE(VisitForValue(expr->obj()));
7634  if (!expr->key()->IsPropertyName() || expr->IsStringAccess()) {
7635    CHECK_ALIVE(VisitForValue(expr->key()));
7636  }
7637
7638  BuildLoad(expr, expr->id());
7639}
7640
7641HInstruction* HGraphBuilder::BuildConstantMapCheck(Handle<JSObject> constant,
7642                                                   bool ensure_no_elements) {
7643  HCheckMaps* check = Add<HCheckMaps>(
7644      Add<HConstant>(constant), handle(constant->map()));
7645  check->ClearDependsOnFlag(kElementsKind);
7646  if (ensure_no_elements) {
7647    // TODO(ishell): remove this once we support NO_ELEMENTS elements kind.
7648    HValue* elements = AddLoadElements(check, nullptr);
7649    HValue* empty_elements =
7650        Add<HConstant>(isolate()->factory()->empty_fixed_array());
7651    IfBuilder if_empty(this);
7652    if_empty.IfNot<HCompareObjectEqAndBranch>(elements, empty_elements);
7653    if_empty.ThenDeopt(DeoptimizeReason::kWrongMap);
7654    if_empty.End();
7655  }
7656  return check;
7657}
7658
7659HInstruction* HGraphBuilder::BuildCheckPrototypeMaps(Handle<JSObject> prototype,
7660                                                     Handle<JSObject> holder,
7661                                                     bool ensure_no_elements) {
7662  PrototypeIterator iter(isolate(), prototype, kStartAtReceiver);
7663  while (holder.is_null() ||
7664         !PrototypeIterator::GetCurrent(iter).is_identical_to(holder)) {
7665    BuildConstantMapCheck(PrototypeIterator::GetCurrent<JSObject>(iter),
7666                          ensure_no_elements);
7667    iter.Advance();
7668    if (iter.IsAtEnd()) {
7669      return NULL;
7670    }
7671  }
7672  return BuildConstantMapCheck(holder);
7673}
7674
7675
7676void HOptimizedGraphBuilder::AddCheckPrototypeMaps(Handle<JSObject> holder,
7677                                                   Handle<Map> receiver_map) {
7678  if (!holder.is_null()) {
7679    Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
7680    BuildCheckPrototypeMaps(prototype, holder);
7681  }
7682}
7683
7684void HOptimizedGraphBuilder::BuildEnsureCallable(HValue* object) {
7685  NoObservableSideEffectsScope scope(this);
7686  const Runtime::Function* throw_called_non_callable =
7687      Runtime::FunctionForId(Runtime::kThrowCalledNonCallable);
7688
7689  IfBuilder is_not_function(this);
7690  HValue* smi_check = is_not_function.If<HIsSmiAndBranch>(object);
7691  is_not_function.Or();
7692  HValue* map = AddLoadMap(object, smi_check);
7693  HValue* bit_field =
7694      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapBitField());
7695  HValue* bit_field_masked = AddUncasted<HBitwise>(
7696      Token::BIT_AND, bit_field, Add<HConstant>(1 << Map::kIsCallable));
7697  is_not_function.IfNot<HCompareNumericAndBranch>(
7698      bit_field_masked, Add<HConstant>(1 << Map::kIsCallable), Token::EQ);
7699  is_not_function.Then();
7700  {
7701    Add<HPushArguments>(object);
7702    Add<HCallRuntime>(throw_called_non_callable, 1);
7703  }
7704  is_not_function.End();
7705}
7706
7707HInstruction* HOptimizedGraphBuilder::NewCallFunction(
7708    HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
7709    ConvertReceiverMode convert_mode, TailCallMode tail_call_mode) {
7710  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
7711    BuildEnsureCallable(function);
7712  } else {
7713    DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
7714  }
7715  HValue* arity = Add<HConstant>(argument_count - 1);
7716
7717  HValue* op_vals[] = {function, arity};
7718
7719  Callable callable =
7720      CodeFactory::Call(isolate(), convert_mode, tail_call_mode);
7721  HConstant* stub = Add<HConstant>(callable.code());
7722
7723  return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
7724                                  ArrayVector(op_vals),
7725                                  syntactic_tail_call_mode);
7726}
7727
7728HInstruction* HOptimizedGraphBuilder::NewCallFunctionViaIC(
7729    HValue* function, int argument_count, TailCallMode syntactic_tail_call_mode,
7730    ConvertReceiverMode convert_mode, TailCallMode tail_call_mode,
7731    FeedbackSlot slot) {
7732  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
7733    BuildEnsureCallable(function);
7734  } else {
7735    DCHECK_EQ(TailCallMode::kDisallow, tail_call_mode);
7736  }
7737  int arity = argument_count - 1;
7738  Handle<FeedbackVector> vector(current_feedback_vector(), isolate());
7739  HValue* arity_val = Add<HConstant>(arity);
7740  HValue* index_val = Add<HConstant>(vector->GetIndex(slot));
7741  HValue* vector_val = Add<HConstant>(vector);
7742
7743  HValue* op_vals[] = {function, arity_val, index_val, vector_val};
7744  Callable callable =
7745      CodeFactory::CallIC(isolate(), convert_mode, tail_call_mode);
7746  HConstant* stub = Add<HConstant>(callable.code());
7747
7748  return New<HCallWithDescriptor>(stub, argument_count, callable.descriptor(),
7749                                  ArrayVector(op_vals),
7750                                  syntactic_tail_call_mode);
7751}
7752
7753HInstruction* HOptimizedGraphBuilder::NewCallConstantFunction(
7754    Handle<JSFunction> function, int argument_count,
7755    TailCallMode syntactic_tail_call_mode, TailCallMode tail_call_mode) {
7756  HValue* target = Add<HConstant>(function);
7757  return New<HInvokeFunction>(target, function, argument_count,
7758                              syntactic_tail_call_mode, tail_call_mode);
7759}
7760
7761
7762class FunctionSorter {
7763 public:
7764  explicit FunctionSorter(int index = 0, int ticks = 0, int size = 0)
7765      : index_(index), ticks_(ticks), size_(size) {}
7766
7767  int index() const { return index_; }
7768  int ticks() const { return ticks_; }
7769  int size() const { return size_; }
7770
7771 private:
7772  int index_;
7773  int ticks_;
7774  int size_;
7775};
7776
7777
7778inline bool operator<(const FunctionSorter& lhs, const FunctionSorter& rhs) {
7779  int diff = lhs.ticks() - rhs.ticks();
7780  if (diff != 0) return diff > 0;
7781  return lhs.size() < rhs.size();
7782}
7783
7784
7785void HOptimizedGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
7786                                                        HValue* receiver,
7787                                                        SmallMapList* maps,
7788                                                        Handle<String> name) {
7789  int argument_count = expr->arguments()->length() + 1;  // Includes receiver.
7790  FunctionSorter order[kMaxCallPolymorphism];
7791
7792  bool handle_smi = false;
7793  bool handled_string = false;
7794  int ordered_functions = 0;
7795
7796  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
7797  TailCallMode tail_call_mode =
7798      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
7799
7800  int i;
7801  for (i = 0; i < maps->length() && ordered_functions < kMaxCallPolymorphism;
7802       ++i) {
7803    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
7804    if (info.CanAccessMonomorphic() && info.IsDataConstant() &&
7805        info.constant()->IsJSFunction()) {
7806      if (info.IsStringType()) {
7807        if (handled_string) continue;
7808        handled_string = true;
7809      }
7810      Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7811      if (info.IsNumberType()) {
7812        handle_smi = true;
7813      }
7814      expr->set_target(target);
7815      order[ordered_functions++] = FunctionSorter(
7816          i, target->shared()->profiler_ticks(), InliningAstSize(target));
7817    }
7818  }
7819
7820  std::sort(order, order + ordered_functions);
7821
7822  if (i < maps->length()) {
7823    maps->Clear();
7824    ordered_functions = -1;
7825  }
7826
7827  HBasicBlock* number_block = NULL;
7828  HBasicBlock* join = NULL;
7829  handled_string = false;
7830  int count = 0;
7831
7832  for (int fn = 0; fn < ordered_functions; ++fn) {
7833    int i = order[fn].index();
7834    PropertyAccessInfo info(this, LOAD, maps->at(i), name);
7835    if (info.IsStringType()) {
7836      if (handled_string) continue;
7837      handled_string = true;
7838    }
7839    // Reloads the target.
7840    info.CanAccessMonomorphic();
7841    Handle<JSFunction> target = Handle<JSFunction>::cast(info.constant());
7842
7843    expr->set_target(target);
7844    if (count == 0) {
7845      // Only needed once.
7846      join = graph()->CreateBasicBlock();
7847      if (handle_smi) {
7848        HBasicBlock* empty_smi_block = graph()->CreateBasicBlock();
7849        HBasicBlock* not_smi_block = graph()->CreateBasicBlock();
7850        number_block = graph()->CreateBasicBlock();
7851        FinishCurrentBlock(New<HIsSmiAndBranch>(
7852                receiver, empty_smi_block, not_smi_block));
7853        GotoNoSimulate(empty_smi_block, number_block);
7854        set_current_block(not_smi_block);
7855      } else {
7856        BuildCheckHeapObject(receiver);
7857      }
7858    }
7859    ++count;
7860    HBasicBlock* if_true = graph()->CreateBasicBlock();
7861    HBasicBlock* if_false = graph()->CreateBasicBlock();
7862    HUnaryControlInstruction* compare;
7863
7864    Handle<Map> map = info.map();
7865    if (info.IsNumberType()) {
7866      Handle<Map> heap_number_map = isolate()->factory()->heap_number_map();
7867      compare = New<HCompareMap>(receiver, heap_number_map, if_true, if_false);
7868    } else if (info.IsStringType()) {
7869      compare = New<HIsStringAndBranch>(receiver, if_true, if_false);
7870    } else {
7871      compare = New<HCompareMap>(receiver, map, if_true, if_false);
7872    }
7873    FinishCurrentBlock(compare);
7874
7875    if (info.IsNumberType()) {
7876      GotoNoSimulate(if_true, number_block);
7877      if_true = number_block;
7878    }
7879
7880    set_current_block(if_true);
7881
7882    AddCheckPrototypeMaps(info.holder(), map);
7883
7884    HValue* function = Add<HConstant>(expr->target());
7885    environment()->SetExpressionStackAt(0, function);
7886    Push(receiver);
7887    CHECK_ALIVE(VisitExpressions(expr->arguments()));
7888    bool needs_wrapping = info.NeedsWrappingFor(target);
7889    bool try_inline = FLAG_polymorphic_inlining && !needs_wrapping;
7890    if (FLAG_trace_inlining && try_inline) {
7891      Handle<JSFunction> caller = current_info()->closure();
7892      std::unique_ptr<char[]> caller_name =
7893          caller->shared()->DebugName()->ToCString();
7894      PrintF("Trying to inline the polymorphic call to %s from %s\n",
7895             name->ToCString().get(),
7896             caller_name.get());
7897    }
7898    if (try_inline && TryInlineCall(expr)) {
7899      // Trying to inline will signal that we should bailout from the
7900      // entire compilation by setting stack overflow on the visitor.
7901      if (HasStackOverflow()) return;
7902    } else {
7903      // Since HWrapReceiver currently cannot actually wrap numbers and strings,
7904      // use the regular call builtin for method calls to wrap the receiver.
7905      // TODO(verwaest): Support creation of value wrappers directly in
7906      // HWrapReceiver.
7907      HInstruction* call =
7908          needs_wrapping
7909              ? NewCallFunction(
7910                    function, argument_count, syntactic_tail_call_mode,
7911                    ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode)
7912              : NewCallConstantFunction(target, argument_count,
7913                                        syntactic_tail_call_mode,
7914                                        tail_call_mode);
7915      PushArgumentsFromEnvironment(argument_count);
7916      AddInstruction(call);
7917      Drop(1);  // Drop the function.
7918      if (!ast_context()->IsEffect()) Push(call);
7919    }
7920
7921    if (current_block() != NULL) Goto(join);
7922    set_current_block(if_false);
7923  }
7924
7925  // Finish up.  Unconditionally deoptimize if we've handled all the maps we
7926  // know about and do not want to handle ones we've never seen.  Otherwise
7927  // use a generic IC.
7928  if (ordered_functions == maps->length() && FLAG_deoptimize_uncommon_cases) {
7929    FinishExitWithHardDeoptimization(
7930        DeoptimizeReason::kUnknownMapInPolymorphicCall);
7931  } else {
7932    Property* prop = expr->expression()->AsProperty();
7933    HInstruction* function =
7934        BuildNamedGeneric(LOAD, prop, prop->PropertyFeedbackSlot(), receiver,
7935                          name, NULL, prop->IsUninitialized());
7936    AddInstruction(function);
7937    Push(function);
7938    AddSimulate(prop->LoadId(), REMOVABLE_SIMULATE);
7939
7940    environment()->SetExpressionStackAt(1, function);
7941    environment()->SetExpressionStackAt(0, receiver);
7942    CHECK_ALIVE(VisitExpressions(expr->arguments()));
7943
7944    HInstruction* call = NewCallFunction(
7945        function, argument_count, syntactic_tail_call_mode,
7946        ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);
7947
7948    PushArgumentsFromEnvironment(argument_count);
7949
7950    Drop(1);  // Function.
7951
7952    if (join != NULL) {
7953      AddInstruction(call);
7954      if (!ast_context()->IsEffect()) Push(call);
7955      Goto(join);
7956    } else {
7957      return ast_context()->ReturnInstruction(call, expr->id());
7958    }
7959  }
7960
7961  // We assume that control flow is always live after an expression.  So
7962  // even without predecessors to the join block, we set it as the exit
7963  // block and continue by adding instructions there.
7964  DCHECK(join != NULL);
7965  if (join->HasPredecessor()) {
7966    set_current_block(join);
7967    join->SetJoinId(expr->id());
7968    if (!ast_context()->IsEffect()) return ast_context()->ReturnValue(Pop());
7969  } else {
7970    set_current_block(NULL);
7971  }
7972}
7973
7974void HOptimizedGraphBuilder::TraceInline(Handle<JSFunction> target,
7975                                         Handle<JSFunction> caller,
7976                                         const char* reason,
7977                                         TailCallMode tail_call_mode) {
7978  if (FLAG_trace_inlining) {
7979    std::unique_ptr<char[]> target_name =
7980        target->shared()->DebugName()->ToCString();
7981    std::unique_ptr<char[]> caller_name =
7982        caller->shared()->DebugName()->ToCString();
7983    if (reason == NULL) {
7984      const char* call_mode =
7985          tail_call_mode == TailCallMode::kAllow ? "tail called" : "called";
7986      PrintF("Inlined %s %s from %s.\n", target_name.get(), call_mode,
7987             caller_name.get());
7988    } else {
7989      PrintF("Did not inline %s called from %s (%s).\n",
7990             target_name.get(), caller_name.get(), reason);
7991    }
7992  }
7993}
7994
7995
7996static const int kNotInlinable = 1000000000;
7997
7998
7999int HOptimizedGraphBuilder::InliningAstSize(Handle<JSFunction> target) {
8000  if (!FLAG_use_inlining) return kNotInlinable;
8001
8002  // Precondition: call is monomorphic and we have found a target with the
8003  // appropriate arity.
8004  Handle<JSFunction> caller = current_info()->closure();
8005  Handle<SharedFunctionInfo> target_shared(target->shared());
8006
8007  // Always inline functions that force inlining.
8008  if (target_shared->force_inline()) {
8009    return 0;
8010  }
8011  if (!target->shared()->IsUserJavaScript()) {
8012    return kNotInlinable;
8013  }
8014
8015  if (target_shared->IsApiFunction()) {
8016    TraceInline(target, caller, "target is api function");
8017    return kNotInlinable;
8018  }
8019
8020  // Do a quick check on source code length to avoid parsing large
8021  // inlining candidates.
8022  if (target_shared->SourceSize() >
8023      Min(FLAG_max_inlined_source_size, kUnlimitedMaxInlinedSourceSize)) {
8024    TraceInline(target, caller, "target text too big");
8025    return kNotInlinable;
8026  }
8027
8028  // Target must be inlineable.
8029  BailoutReason noopt_reason = target_shared->disable_optimization_reason();
8030  if (!target_shared->IsInlineable() && noopt_reason != kHydrogenFilter) {
8031    TraceInline(target, caller, "target not inlineable");
8032    return kNotInlinable;
8033  }
8034  if (noopt_reason != kNoReason && noopt_reason != kHydrogenFilter) {
8035    TraceInline(target, caller, "target contains unsupported syntax [early]");
8036    return kNotInlinable;
8037  }
8038
8039  int nodes_added = target_shared->ast_node_count();
8040  return nodes_added;
8041}
8042
8043bool HOptimizedGraphBuilder::TryInline(Handle<JSFunction> target,
8044                                       int arguments_count,
8045                                       HValue* implicit_return_value,
8046                                       BailoutId ast_id, BailoutId return_id,
8047                                       InliningKind inlining_kind,
8048                                       TailCallMode syntactic_tail_call_mode) {
8049  if (target->context()->native_context() !=
8050      top_info()->closure()->context()->native_context()) {
8051    return false;
8052  }
8053  int nodes_added = InliningAstSize(target);
8054  if (nodes_added == kNotInlinable) return false;
8055
8056  Handle<JSFunction> caller = current_info()->closure();
8057  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
8058    TraceInline(target, caller, "target AST is too large [early]");
8059    return false;
8060  }
8061
8062  // Don't inline deeper than the maximum number of inlining levels.
8063  HEnvironment* env = environment();
8064  int current_level = 1;
8065  while (env->outer() != NULL) {
8066    if (current_level == FLAG_max_inlining_levels) {
8067      TraceInline(target, caller, "inline depth limit reached");
8068      return false;
8069    }
8070    if (env->outer()->frame_type() == JS_FUNCTION) {
8071      current_level++;
8072    }
8073    env = env->outer();
8074  }
8075
8076  // Don't inline recursive functions.
8077  for (FunctionState* state = function_state();
8078       state != NULL;
8079       state = state->outer()) {
8080    if (*state->compilation_info()->closure() == *target) {
8081      TraceInline(target, caller, "target is recursive");
8082      return false;
8083    }
8084  }
8085
8086  // We don't want to add more than a certain number of nodes from inlining.
8087  // Always inline small methods (<= 10 nodes).
8088  if (inlined_count_ > Min(FLAG_max_inlined_nodes_cumulative,
8089                           kUnlimitedMaxInlinedNodesCumulative)) {
8090    TraceInline(target, caller, "cumulative AST node limit reached");
8091    return false;
8092  }
8093
8094  // Parse and allocate variables.
8095  // Use the same AstValueFactory for creating strings in the sub-compilation
8096  // step, but don't transfer ownership to target_info.
8097  Handle<SharedFunctionInfo> target_shared(target->shared());
8098  ParseInfo parse_info(target_shared, top_info()->parse_info()->zone_shared());
8099  parse_info.set_ast_value_factory(
8100      top_info()->parse_info()->ast_value_factory());
8101  parse_info.set_ast_value_factory_owned(false);
8102
8103  CompilationInfo target_info(parse_info.zone(), &parse_info, target);
8104
8105  if (inlining_kind != CONSTRUCT_CALL_RETURN &&
8106      IsClassConstructor(target_shared->kind())) {
8107    TraceInline(target, caller, "target is classConstructor");
8108    return false;
8109  }
8110
8111  if (target_shared->HasDebugInfo()) {
8112    TraceInline(target, caller, "target is being debugged");
8113    return false;
8114  }
8115  if (!Compiler::ParseAndAnalyze(target_info.parse_info())) {
8116    if (target_info.isolate()->has_pending_exception()) {
8117      // Parse or scope error, never optimize this function.
8118      SetStackOverflow();
8119      target_shared->DisableOptimization(kParseScopeError);
8120    }
8121    TraceInline(target, caller, "parse failure");
8122    return false;
8123  }
8124  if (target_shared->must_use_ignition_turbo()) {
8125    TraceInline(target, caller, "ParseAndAnalyze found incompatibility");
8126    return false;
8127  }
8128
8129  if (target_info.scope()->NeedsContext()) {
8130    TraceInline(target, caller, "target has context-allocated variables");
8131    return false;
8132  }
8133
8134  if (target_info.scope()->rest_parameter() != nullptr) {
8135    TraceInline(target, caller, "target uses rest parameters");
8136    return false;
8137  }
8138
8139  FunctionLiteral* function = target_info.literal();
8140
8141  // The following conditions must be checked again after re-parsing, because
8142  // earlier the information might not have been complete due to lazy parsing.
8143  nodes_added = function->ast_node_count();
8144  if (nodes_added > Min(FLAG_max_inlined_nodes, kUnlimitedMaxInlinedNodes)) {
8145    TraceInline(target, caller, "target AST is too large [late]");
8146    return false;
8147  }
8148  if (function->dont_optimize()) {
8149    TraceInline(target, caller, "target contains unsupported syntax [late]");
8150    return false;
8151  }
8152
8153  // If the function uses the arguments object check that inlining of functions
8154  // with arguments object is enabled and the arguments-variable is
8155  // stack allocated.
8156  if (function->scope()->arguments() != NULL) {
8157    if (!FLAG_inline_arguments) {
8158      TraceInline(target, caller, "target uses arguments object");
8159      return false;
8160    }
8161  }
8162
8163  // Unsupported variable references present.
8164  if (function->scope()->this_function_var() != nullptr ||
8165      function->scope()->new_target_var() != nullptr) {
8166    TraceInline(target, caller, "target uses new target or this function");
8167    return false;
8168  }
8169
8170  // All declarations must be inlineable.
8171  Declaration::List* decls = target_info.scope()->declarations();
8172  for (Declaration* decl : *decls) {
8173    if (decl->IsFunctionDeclaration() ||
8174        !decl->proxy()->var()->IsStackAllocated()) {
8175      TraceInline(target, caller, "target has non-trivial declaration");
8176      return false;
8177    }
8178  }
8179
8180  // Generate the deoptimization data for the unoptimized version of
8181  // the target function if we don't already have it.
8182  if (!Compiler::EnsureDeoptimizationSupport(&target_info)) {
8183    TraceInline(target, caller, "could not generate deoptimization info");
8184    return false;
8185  }
8186
8187  // Remember that we inlined this function. This needs to be called right
8188  // after the EnsureDeoptimizationSupport call so that the code flusher
8189  // does not remove the code with the deoptimization support.
8190  int inlining_id = top_info()->AddInlinedFunction(target_info.shared_info(),
8191                                                   source_position());
8192
8193  // ----------------------------------------------------------------
8194  // After this point, we've made a decision to inline this function (so
8195  // TryInline should always return true).
8196
8197  // If target was lazily compiled, it's literals array may not yet be set up.
8198  JSFunction::EnsureLiterals(target);
8199
8200  // Type-check the inlined function.
8201  DCHECK(target_shared->has_deoptimization_support());
8202  AstTyper(target_info.isolate(), target_info.zone(), target_info.closure(),
8203           target_info.scope(), target_info.osr_ast_id(), target_info.literal(),
8204           &bounds_)
8205      .Run();
8206
8207  // Save the pending call context. Set up new one for the inlined function.
8208  // The function state is new-allocated because we need to delete it
8209  // in two different places.
8210  FunctionState* target_state = new FunctionState(
8211      this, &target_info, inlining_kind, inlining_id,
8212      function_state()->ComputeTailCallMode(syntactic_tail_call_mode));
8213
8214  HConstant* undefined = graph()->GetConstantUndefined();
8215
8216  HEnvironment* inner_env = environment()->CopyForInlining(
8217      target, arguments_count, function, undefined,
8218      function_state()->inlining_kind(), syntactic_tail_call_mode);
8219
8220  HConstant* context = Add<HConstant>(Handle<Context>(target->context()));
8221  inner_env->BindContext(context);
8222
8223  // Create a dematerialized arguments object for the function, also copy the
8224  // current arguments values to use them for materialization.
8225  HEnvironment* arguments_env = inner_env->arguments_environment();
8226  int parameter_count = arguments_env->parameter_count();
8227  HArgumentsObject* arguments_object = Add<HArgumentsObject>(parameter_count);
8228  for (int i = 0; i < parameter_count; i++) {
8229    arguments_object->AddArgument(arguments_env->Lookup(i), zone());
8230  }
8231
8232  // If the function uses arguments object then bind bind one.
8233  if (function->scope()->arguments() != NULL) {
8234    DCHECK(function->scope()->arguments()->IsStackAllocated());
8235    inner_env->Bind(function->scope()->arguments(), arguments_object);
8236  }
8237
8238  // Capture the state before invoking the inlined function for deopt in the
8239  // inlined function. This simulate has no bailout-id since it's not directly
8240  // reachable for deopt, and is only used to capture the state. If the simulate
8241  // becomes reachable by merging, the ast id of the simulate merged into it is
8242  // adopted.
8243  Add<HSimulate>(BailoutId::None());
8244
8245  current_block()->UpdateEnvironment(inner_env);
8246  Scope* saved_scope = scope();
8247  set_scope(target_info.scope());
8248  HEnterInlined* enter_inlined = Add<HEnterInlined>(
8249      return_id, target, context, arguments_count, function,
8250      function_state()->inlining_kind(), function->scope()->arguments(),
8251      arguments_object, syntactic_tail_call_mode);
8252  if (is_tracking_positions()) {
8253    enter_inlined->set_inlining_id(inlining_id);
8254  }
8255
8256  function_state()->set_entry(enter_inlined);
8257
8258  VisitDeclarations(target_info.scope()->declarations());
8259  VisitStatements(function->body());
8260  set_scope(saved_scope);
8261  if (HasStackOverflow()) {
8262    // Bail out if the inline function did, as we cannot residualize a call
8263    // instead, but do not disable optimization for the outer function.
8264    TraceInline(target, caller, "inline graph construction failed");
8265    target_shared->DisableOptimization(kInliningBailedOut);
8266    current_info()->RetryOptimization(kInliningBailedOut);
8267    delete target_state;
8268    return true;
8269  }
8270
8271  // Update inlined nodes count.
8272  inlined_count_ += nodes_added;
8273
8274  Handle<Code> unoptimized_code(target_shared->code());
8275  DCHECK(unoptimized_code->kind() == Code::FUNCTION);
8276  Handle<TypeFeedbackInfo> type_info(
8277      TypeFeedbackInfo::cast(unoptimized_code->type_feedback_info()));
8278  graph()->update_type_change_checksum(type_info->own_type_change_checksum());
8279
8280  TraceInline(target, caller, NULL, syntactic_tail_call_mode);
8281
8282  if (current_block() != NULL) {
8283    FunctionState* state = function_state();
8284    if (state->inlining_kind() == CONSTRUCT_CALL_RETURN) {
8285      // Falling off the end of an inlined construct call. In a test context the
8286      // return value will always evaluate to true, in a value context the
8287      // return value is the newly allocated receiver.
8288      if (call_context()->IsTest()) {
8289        inlined_test_context()->ReturnValue(graph()->GetConstantTrue());
8290      } else if (call_context()->IsEffect()) {
8291        Goto(function_return(), state);
8292      } else {
8293        DCHECK(call_context()->IsValue());
8294        AddLeaveInlined(implicit_return_value, state);
8295      }
8296    } else if (state->inlining_kind() == SETTER_CALL_RETURN) {
8297      // Falling off the end of an inlined setter call. The returned value is
8298      // never used, the value of an assignment is always the value of the RHS
8299      // of the assignment.
8300      if (call_context()->IsTest()) {
8301        inlined_test_context()->ReturnValue(implicit_return_value);
8302      } else if (call_context()->IsEffect()) {
8303        Goto(function_return(), state);
8304      } else {
8305        DCHECK(call_context()->IsValue());
8306        AddLeaveInlined(implicit_return_value, state);
8307      }
8308    } else {
8309      // Falling off the end of a normal inlined function. This basically means
8310      // returning undefined.
8311      if (call_context()->IsTest()) {
8312        inlined_test_context()->ReturnValue(graph()->GetConstantFalse());
8313      } else if (call_context()->IsEffect()) {
8314        Goto(function_return(), state);
8315      } else {
8316        DCHECK(call_context()->IsValue());
8317        AddLeaveInlined(undefined, state);
8318      }
8319    }
8320  }
8321
8322  // Fix up the function exits.
8323  if (inlined_test_context() != NULL) {
8324    HBasicBlock* if_true = inlined_test_context()->if_true();
8325    HBasicBlock* if_false = inlined_test_context()->if_false();
8326
8327    HEnterInlined* entry = function_state()->entry();
8328
8329    // Pop the return test context from the expression context stack.
8330    DCHECK(ast_context() == inlined_test_context());
8331    ClearInlinedTestContext();
8332    delete target_state;
8333
8334    // Forward to the real test context.
8335    if (if_true->HasPredecessor()) {
8336      entry->RegisterReturnTarget(if_true, zone());
8337      if_true->SetJoinId(ast_id);
8338      HBasicBlock* true_target = TestContext::cast(ast_context())->if_true();
8339      Goto(if_true, true_target, function_state());
8340    }
8341    if (if_false->HasPredecessor()) {
8342      entry->RegisterReturnTarget(if_false, zone());
8343      if_false->SetJoinId(ast_id);
8344      HBasicBlock* false_target = TestContext::cast(ast_context())->if_false();
8345      Goto(if_false, false_target, function_state());
8346    }
8347    set_current_block(NULL);
8348    return true;
8349
8350  } else if (function_return()->HasPredecessor()) {
8351    function_state()->entry()->RegisterReturnTarget(function_return(), zone());
8352    function_return()->SetJoinId(ast_id);
8353    set_current_block(function_return());
8354  } else {
8355    set_current_block(NULL);
8356  }
8357  delete target_state;
8358  return true;
8359}
8360
8361
8362bool HOptimizedGraphBuilder::TryInlineCall(Call* expr) {
8363  return TryInline(expr->target(), expr->arguments()->length(), NULL,
8364                   expr->id(), expr->ReturnId(), NORMAL_RETURN,
8365                   expr->tail_call_mode());
8366}
8367
8368
8369bool HOptimizedGraphBuilder::TryInlineConstruct(CallNew* expr,
8370                                                HValue* implicit_return_value) {
8371  return TryInline(expr->target(), expr->arguments()->length(),
8372                   implicit_return_value, expr->id(), expr->ReturnId(),
8373                   CONSTRUCT_CALL_RETURN, TailCallMode::kDisallow);
8374}
8375
8376bool HOptimizedGraphBuilder::TryInlineGetter(Handle<Object> getter,
8377                                             Handle<Map> receiver_map,
8378                                             BailoutId ast_id,
8379                                             BailoutId return_id) {
8380  if (TryInlineApiGetter(getter, receiver_map, ast_id)) return true;
8381  if (getter->IsJSFunction()) {
8382    Handle<JSFunction> getter_function = Handle<JSFunction>::cast(getter);
8383    return TryInlineBuiltinGetterCall(getter_function, receiver_map, ast_id) ||
8384           TryInline(getter_function, 0, NULL, ast_id, return_id,
8385                     GETTER_CALL_RETURN, TailCallMode::kDisallow);
8386  }
8387  return false;
8388}
8389
8390bool HOptimizedGraphBuilder::TryInlineSetter(Handle<Object> setter,
8391                                             Handle<Map> receiver_map,
8392                                             BailoutId id,
8393                                             BailoutId assignment_id,
8394                                             HValue* implicit_return_value) {
8395  if (TryInlineApiSetter(setter, receiver_map, id)) return true;
8396  return setter->IsJSFunction() &&
8397         TryInline(Handle<JSFunction>::cast(setter), 1, implicit_return_value,
8398                   id, assignment_id, SETTER_CALL_RETURN,
8399                   TailCallMode::kDisallow);
8400}
8401
8402
8403bool HOptimizedGraphBuilder::TryInlineIndirectCall(Handle<JSFunction> function,
8404                                                   Call* expr,
8405                                                   int arguments_count) {
8406  return TryInline(function, arguments_count, NULL, expr->id(),
8407                   expr->ReturnId(), NORMAL_RETURN, expr->tail_call_mode());
8408}
8409
8410
8411bool HOptimizedGraphBuilder::TryInlineBuiltinFunctionCall(Call* expr) {
8412  if (!expr->target()->shared()->HasBuiltinFunctionId()) return false;
8413  BuiltinFunctionId id = expr->target()->shared()->builtin_function_id();
8414  // We intentionally ignore expr->tail_call_mode() here because builtins
8415  // we inline here do not observe if they were tail called or not.
8416  switch (id) {
8417    case kMathCos:
8418    case kMathExp:
8419    case kMathRound:
8420    case kMathFround:
8421    case kMathFloor:
8422    case kMathAbs:
8423    case kMathSin:
8424    case kMathSqrt:
8425    case kMathLog:
8426    case kMathClz32:
8427      if (expr->arguments()->length() == 1) {
8428        HValue* argument = Pop();
8429        Drop(2);  // Receiver and function.
8430        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8431        ast_context()->ReturnInstruction(op, expr->id());
8432        return true;
8433      }
8434      break;
8435    case kMathImul:
8436      if (expr->arguments()->length() == 2) {
8437        HValue* right = Pop();
8438        HValue* left = Pop();
8439        Drop(2);  // Receiver and function.
8440        HInstruction* op =
8441            HMul::NewImul(isolate(), zone(), context(), left, right);
8442        ast_context()->ReturnInstruction(op, expr->id());
8443        return true;
8444      }
8445      break;
8446    default:
8447      // Not supported for inlining yet.
8448      break;
8449  }
8450  return false;
8451}
8452
8453
8454// static
8455bool HOptimizedGraphBuilder::IsReadOnlyLengthDescriptor(
8456    Handle<Map> jsarray_map) {
8457  DCHECK(!jsarray_map->is_dictionary_map());
8458  Isolate* isolate = jsarray_map->GetIsolate();
8459  Handle<Name> length_string = isolate->factory()->length_string();
8460  DescriptorArray* descriptors = jsarray_map->instance_descriptors();
8461  int number =
8462      descriptors->SearchWithCache(isolate, *length_string, *jsarray_map);
8463  DCHECK_NE(DescriptorArray::kNotFound, number);
8464  return descriptors->GetDetails(number).IsReadOnly();
8465}
8466
8467
8468// static
8469bool HOptimizedGraphBuilder::CanInlineArrayResizeOperation(
8470    Handle<Map> receiver_map) {
8471  return !receiver_map.is_null() && receiver_map->prototype()->IsJSObject() &&
8472         receiver_map->instance_type() == JS_ARRAY_TYPE &&
8473         IsFastElementsKind(receiver_map->elements_kind()) &&
8474         !receiver_map->is_dictionary_map() && receiver_map->is_extensible() &&
8475         (!receiver_map->is_prototype_map() || receiver_map->is_stable()) &&
8476         !IsReadOnlyLengthDescriptor(receiver_map);
8477}
8478
8479bool HOptimizedGraphBuilder::TryInlineBuiltinGetterCall(
8480    Handle<JSFunction> function, Handle<Map> receiver_map, BailoutId ast_id) {
8481  if (!function->shared()->HasBuiltinFunctionId()) return false;
8482  BuiltinFunctionId id = function->shared()->builtin_function_id();
8483
8484  // Try to inline getter calls like DataView.prototype.byteLength/byteOffset
8485  // as operations in the calling function.
8486  switch (id) {
8487    case kDataViewBuffer: {
8488      if (!receiver_map->IsJSDataViewMap()) return false;
8489      HObjectAccess access = HObjectAccess::ForMapAndOffset(
8490          receiver_map, JSDataView::kBufferOffset);
8491      HValue* object = Pop();  // receiver
8492      HInstruction* result = New<HLoadNamedField>(object, object, access);
8493      ast_context()->ReturnInstruction(result, ast_id);
8494      return true;
8495    }
8496    case kDataViewByteLength:
8497    case kDataViewByteOffset: {
8498      if (!receiver_map->IsJSDataViewMap()) return false;
8499      int offset = (id == kDataViewByteLength) ? JSDataView::kByteLengthOffset
8500                                               : JSDataView::kByteOffsetOffset;
8501      HObjectAccess access =
8502          HObjectAccess::ForMapAndOffset(receiver_map, offset);
8503      HValue* object = Pop();  // receiver
8504      HValue* checked_object = Add<HCheckArrayBufferNotNeutered>(object);
8505      HInstruction* result =
8506          New<HLoadNamedField>(object, checked_object, access);
8507      ast_context()->ReturnInstruction(result, ast_id);
8508      return true;
8509    }
8510    case kTypedArrayByteLength:
8511    case kTypedArrayByteOffset:
8512    case kTypedArrayLength: {
8513      if (!receiver_map->IsJSTypedArrayMap()) return false;
8514      int offset = (id == kTypedArrayLength)
8515                       ? JSTypedArray::kLengthOffset
8516                       : (id == kTypedArrayByteLength)
8517                             ? JSTypedArray::kByteLengthOffset
8518                             : JSTypedArray::kByteOffsetOffset;
8519      HObjectAccess access =
8520          HObjectAccess::ForMapAndOffset(receiver_map, offset);
8521      HValue* object = Pop();  // receiver
8522      HValue* checked_object = Add<HCheckArrayBufferNotNeutered>(object);
8523      HInstruction* result =
8524          New<HLoadNamedField>(object, checked_object, access);
8525      ast_context()->ReturnInstruction(result, ast_id);
8526      return true;
8527    }
8528    default:
8529      return false;
8530  }
8531}
8532
8533// static
8534bool HOptimizedGraphBuilder::NoElementsInPrototypeChain(
8535    Handle<Map> receiver_map) {
8536  // TODO(ishell): remove this once we support NO_ELEMENTS elements kind.
8537  PrototypeIterator iter(receiver_map);
8538  Handle<Object> empty_fixed_array =
8539      iter.isolate()->factory()->empty_fixed_array();
8540  while (true) {
8541    Handle<JSObject> current = PrototypeIterator::GetCurrent<JSObject>(iter);
8542    if (current->elements() != *empty_fixed_array) return false;
8543    iter.Advance();
8544    if (iter.IsAtEnd()) {
8545      return true;
8546    }
8547  }
8548}
8549
8550bool HOptimizedGraphBuilder::TryInlineBuiltinMethodCall(
8551    Handle<JSFunction> function, Handle<Map> receiver_map, BailoutId ast_id,
8552    int args_count_no_receiver) {
8553  if (!function->shared()->HasBuiltinFunctionId()) return false;
8554  BuiltinFunctionId id = function->shared()->builtin_function_id();
8555  int argument_count = args_count_no_receiver + 1;  // Plus receiver.
8556
8557  if (receiver_map.is_null()) {
8558    HValue* receiver = environment()->ExpressionStackAt(args_count_no_receiver);
8559    if (receiver->IsConstant() &&
8560        HConstant::cast(receiver)->handle(isolate())->IsHeapObject()) {
8561      receiver_map =
8562          handle(Handle<HeapObject>::cast(
8563                     HConstant::cast(receiver)->handle(isolate()))->map());
8564    }
8565  }
8566  // Try to inline calls like Math.* as operations in the calling function.
8567  switch (id) {
8568    case kObjectHasOwnProperty: {
8569      // It's not safe to look through the phi for elements if we're compiling
8570      // for osr.
8571      if (top_info()->is_osr()) return false;
8572      if (argument_count != 2) return false;
8573      HValue* key = Top();
8574      if (!key->IsLoadKeyed()) return false;
8575      HValue* elements = HLoadKeyed::cast(key)->elements();
8576      if (!elements->IsPhi() || elements->OperandCount() != 1) return false;
8577      if (!elements->OperandAt(0)->IsForInCacheArray()) return false;
8578      HForInCacheArray* cache = HForInCacheArray::cast(elements->OperandAt(0));
8579      HValue* receiver = environment()->ExpressionStackAt(1);
8580      if (!receiver->IsPhi() || receiver->OperandCount() != 1) return false;
8581      if (cache->enumerable() != receiver->OperandAt(0)) return false;
8582      Drop(3);  // key, receiver, function
8583      Add<HCheckMapValue>(receiver, cache->map());
8584      ast_context()->ReturnValue(graph()->GetConstantTrue());
8585      return true;
8586    }
8587    case kStringCharCodeAt:
8588    case kStringCharAt:
8589      if (argument_count == 2) {
8590        HValue* index = Pop();
8591        HValue* string = Pop();
8592        Drop(1);  // Function.
8593        HInstruction* char_code =
8594            BuildStringCharCodeAt(string, index);
8595        if (id == kStringCharCodeAt) {
8596          ast_context()->ReturnInstruction(char_code, ast_id);
8597          return true;
8598        }
8599        AddInstruction(char_code);
8600        HInstruction* result = NewUncasted<HStringCharFromCode>(char_code);
8601        ast_context()->ReturnInstruction(result, ast_id);
8602        return true;
8603      }
8604      break;
8605    case kStringFromCharCode:
8606      if (argument_count == 2) {
8607        HValue* argument = Pop();
8608        Drop(2);  // Receiver and function.
8609        argument = AddUncasted<HForceRepresentation>(
8610            argument, Representation::Integer32());
8611        argument->SetFlag(HValue::kTruncatingToInt32);
8612        HInstruction* result = NewUncasted<HStringCharFromCode>(argument);
8613        ast_context()->ReturnInstruction(result, ast_id);
8614        return true;
8615      }
8616      break;
8617    case kMathCos:
8618    case kMathExp:
8619    case kMathRound:
8620    case kMathFround:
8621    case kMathFloor:
8622    case kMathAbs:
8623    case kMathSin:
8624    case kMathSqrt:
8625    case kMathLog:
8626    case kMathClz32:
8627      if (argument_count == 2) {
8628        HValue* argument = Pop();
8629        Drop(2);  // Receiver and function.
8630        HInstruction* op = NewUncasted<HUnaryMathOperation>(argument, id);
8631        ast_context()->ReturnInstruction(op, ast_id);
8632        return true;
8633      }
8634      break;
8635    case kMathPow:
8636      if (argument_count == 3) {
8637        HValue* right = Pop();
8638        HValue* left = Pop();
8639        Drop(2);  // Receiver and function.
8640        HInstruction* result = NULL;
8641        // Use sqrt() if exponent is 0.5 or -0.5.
8642        if (right->IsConstant() && HConstant::cast(right)->HasDoubleValue()) {
8643          double exponent = HConstant::cast(right)->DoubleValue();
8644          if (exponent == 0.5) {
8645            result = NewUncasted<HUnaryMathOperation>(left, kMathPowHalf);
8646          } else if (exponent == -0.5) {
8647            HValue* one = graph()->GetConstant1();
8648            HInstruction* sqrt = AddUncasted<HUnaryMathOperation>(
8649                left, kMathPowHalf);
8650            // MathPowHalf doesn't have side effects so there's no need for
8651            // an environment simulation here.
8652            DCHECK(!sqrt->HasObservableSideEffects());
8653            result = NewUncasted<HDiv>(one, sqrt);
8654          } else if (exponent == 2.0) {
8655            result = NewUncasted<HMul>(left, left);
8656          }
8657        }
8658
8659        if (result == NULL) {
8660          result = NewUncasted<HPower>(left, right);
8661        }
8662        ast_context()->ReturnInstruction(result, ast_id);
8663        return true;
8664      }
8665      break;
8666    case kMathMax:
8667    case kMathMin:
8668      if (argument_count == 3) {
8669        HValue* right = Pop();
8670        HValue* left = Pop();
8671        Drop(2);  // Receiver and function.
8672        HMathMinMax::Operation op = (id == kMathMin) ? HMathMinMax::kMathMin
8673                                                     : HMathMinMax::kMathMax;
8674        HInstruction* result = NewUncasted<HMathMinMax>(left, right, op);
8675        ast_context()->ReturnInstruction(result, ast_id);
8676        return true;
8677      }
8678      break;
8679    case kMathImul:
8680      if (argument_count == 3) {
8681        HValue* right = Pop();
8682        HValue* left = Pop();
8683        Drop(2);  // Receiver and function.
8684        HInstruction* result =
8685            HMul::NewImul(isolate(), zone(), context(), left, right);
8686        ast_context()->ReturnInstruction(result, ast_id);
8687        return true;
8688      }
8689      break;
8690    case kArrayPop: {
8691      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
8692      ElementsKind elements_kind = receiver_map->elements_kind();
8693
8694      Drop(args_count_no_receiver);
8695      HValue* result;
8696      HValue* reduced_length;
8697      HValue* receiver = Pop();
8698
8699      HValue* checked_object = AddCheckMap(receiver, receiver_map);
8700      HValue* length =
8701          Add<HLoadNamedField>(checked_object, nullptr,
8702                               HObjectAccess::ForArrayLength(elements_kind));
8703
8704      Drop(1);  // Function.
8705
8706      { NoObservableSideEffectsScope scope(this);
8707        IfBuilder length_checker(this);
8708
8709        HValue* bounds_check = length_checker.If<HCompareNumericAndBranch>(
8710            length, graph()->GetConstant0(), Token::EQ);
8711        length_checker.Then();
8712
8713        if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8714
8715        length_checker.Else();
8716        HValue* elements = AddLoadElements(checked_object);
8717        // Ensure that we aren't popping from a copy-on-write array.
8718        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8719          elements = BuildCopyElementsOnWrite(checked_object, elements,
8720                                              elements_kind, length);
8721        }
8722        reduced_length = AddUncasted<HSub>(length, graph()->GetConstant1());
8723        result = AddElementAccess(elements, reduced_length, nullptr,
8724                                  bounds_check, nullptr, elements_kind, LOAD);
8725        HValue* hole = IsFastSmiOrObjectElementsKind(elements_kind)
8726                           ? graph()->GetConstantHole()
8727                           : Add<HConstant>(HConstant::kHoleNaN);
8728        if (IsFastSmiOrObjectElementsKind(elements_kind)) {
8729          elements_kind = FAST_HOLEY_ELEMENTS;
8730        }
8731        AddElementAccess(elements, reduced_length, hole, bounds_check, nullptr,
8732                         elements_kind, STORE);
8733        Add<HStoreNamedField>(
8734            checked_object, HObjectAccess::ForArrayLength(elements_kind),
8735            reduced_length, STORE_TO_INITIALIZED_ENTRY);
8736
8737        if (!ast_context()->IsEffect()) Push(result);
8738
8739        length_checker.End();
8740      }
8741      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8742      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
8743      if (!ast_context()->IsEffect()) Drop(1);
8744
8745      ast_context()->ReturnValue(result);
8746      return true;
8747    }
8748    case kArrayPush: {
8749      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
8750      ElementsKind elements_kind = receiver_map->elements_kind();
8751
8752      // If there may be elements accessors in the prototype chain, the fast
8753      // inlined version can't be used.
8754      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8755      // If there currently can be no elements accessors on the prototype chain,
8756      // it doesn't mean that there won't be any later. Install a full prototype
8757      // chain check to trap element accessors being installed on the prototype
8758      // chain, which would cause elements to go to dictionary mode and result
8759      // in a map change.
8760      Handle<JSObject> prototype(JSObject::cast(receiver_map->prototype()));
8761      BuildCheckPrototypeMaps(prototype, Handle<JSObject>());
8762
8763      // Protect against adding elements to the Array prototype, which needs to
8764      // route through appropriate bottlenecks.
8765      if (isolate()->IsFastArrayConstructorPrototypeChainIntact() &&
8766          !prototype->IsJSArray()) {
8767        return false;
8768      }
8769
8770      const int argc = args_count_no_receiver;
8771      if (argc != 1) return false;
8772
8773      HValue* value_to_push = Pop();
8774      HValue* array = Pop();
8775      Drop(1);  // Drop function.
8776
8777      HInstruction* new_size = NULL;
8778      HValue* length = NULL;
8779
8780      {
8781        NoObservableSideEffectsScope scope(this);
8782
8783        length = Add<HLoadNamedField>(
8784            array, nullptr, HObjectAccess::ForArrayLength(elements_kind));
8785
8786        new_size = AddUncasted<HAdd>(length, graph()->GetConstant1());
8787
8788        bool is_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
8789        HValue* checked_array = Add<HCheckMaps>(array, receiver_map);
8790        BuildUncheckedMonomorphicElementAccess(
8791            checked_array, length, value_to_push, is_array, elements_kind,
8792            STORE, NEVER_RETURN_HOLE, STORE_AND_GROW_NO_TRANSITION);
8793
8794        if (!ast_context()->IsEffect()) Push(new_size);
8795        Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
8796        if (!ast_context()->IsEffect()) Drop(1);
8797      }
8798
8799      ast_context()->ReturnValue(new_size);
8800      return true;
8801    }
8802    case kArrayShift: {
8803      if (!CanInlineArrayResizeOperation(receiver_map)) return false;
8804      if (!NoElementsInPrototypeChain(receiver_map)) return false;
8805      ElementsKind kind = receiver_map->elements_kind();
8806
8807      // If there may be elements accessors in the prototype chain, the fast
8808      // inlined version can't be used.
8809      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8810
8811      // If there currently can be no elements accessors on the prototype chain,
8812      // it doesn't mean that there won't be any later. Install a full prototype
8813      // chain check to trap element accessors being installed on the prototype
8814      // chain, which would cause elements to go to dictionary mode and result
8815      // in a map change.
8816      BuildCheckPrototypeMaps(
8817          handle(JSObject::cast(receiver_map->prototype()), isolate()),
8818          Handle<JSObject>::null(), true);
8819
8820      // Threshold for fast inlined Array.shift().
8821      HConstant* inline_threshold = Add<HConstant>(static_cast<int32_t>(16));
8822
8823      Drop(args_count_no_receiver);
8824      HValue* result;
8825      HValue* receiver = Pop();
8826      HValue* checked_object = AddCheckMap(receiver, receiver_map);
8827      HValue* length = Add<HLoadNamedField>(
8828          receiver, checked_object, HObjectAccess::ForArrayLength(kind));
8829
8830      Drop(1);  // Function.
8831      {
8832        NoObservableSideEffectsScope scope(this);
8833
8834        IfBuilder if_lengthiszero(this);
8835        HValue* lengthiszero = if_lengthiszero.If<HCompareNumericAndBranch>(
8836            length, graph()->GetConstant0(), Token::EQ);
8837        if_lengthiszero.Then();
8838        {
8839          if (!ast_context()->IsEffect()) Push(graph()->GetConstantUndefined());
8840        }
8841        if_lengthiszero.Else();
8842        {
8843          HValue* elements = AddLoadElements(receiver);
8844
8845          // Check if we can use the fast inlined Array.shift().
8846          IfBuilder if_inline(this);
8847          if_inline.If<HCompareNumericAndBranch>(
8848              length, inline_threshold, Token::LTE);
8849          if (IsFastSmiOrObjectElementsKind(kind)) {
8850            // We cannot handle copy-on-write backing stores here.
8851            if_inline.AndIf<HCompareMap>(
8852                elements, isolate()->factory()->fixed_array_map());
8853          }
8854          if_inline.Then();
8855          {
8856            // Remember the result.
8857            if (!ast_context()->IsEffect()) {
8858              Push(AddElementAccess(elements, graph()->GetConstant0(), nullptr,
8859                                    lengthiszero, nullptr, kind, LOAD));
8860            }
8861
8862            // Compute the new length.
8863            HValue* new_length = AddUncasted<HSub>(
8864                length, graph()->GetConstant1());
8865            new_length->ClearFlag(HValue::kCanOverflow);
8866
8867            // Copy the remaining elements.
8868            LoopBuilder loop(this, context(), LoopBuilder::kPostIncrement);
8869            {
8870              HValue* new_key = loop.BeginBody(
8871                  graph()->GetConstant0(), new_length, Token::LT);
8872              HValue* key = AddUncasted<HAdd>(new_key, graph()->GetConstant1());
8873              key->ClearFlag(HValue::kCanOverflow);
8874              ElementsKind copy_kind =
8875                  kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
8876              HValue* element =
8877                  AddUncasted<HLoadKeyed>(elements, key, lengthiszero, nullptr,
8878                                          copy_kind, ALLOW_RETURN_HOLE);
8879              HStoreKeyed* store = Add<HStoreKeyed>(elements, new_key, element,
8880                                                    nullptr, copy_kind);
8881              store->SetFlag(HValue::kTruncatingToNumber);
8882            }
8883            loop.EndBody();
8884
8885            // Put a hole at the end.
8886            HValue* hole = IsFastSmiOrObjectElementsKind(kind)
8887                               ? graph()->GetConstantHole()
8888                               : Add<HConstant>(HConstant::kHoleNaN);
8889            if (IsFastSmiOrObjectElementsKind(kind)) kind = FAST_HOLEY_ELEMENTS;
8890            Add<HStoreKeyed>(elements, new_length, hole, nullptr, kind,
8891                             INITIALIZING_STORE);
8892
8893            // Remember new length.
8894            Add<HStoreNamedField>(
8895                receiver, HObjectAccess::ForArrayLength(kind),
8896                new_length, STORE_TO_INITIALIZED_ENTRY);
8897          }
8898          if_inline.Else();
8899          {
8900            Add<HPushArguments>(receiver);
8901            result = AddInstruction(NewCallConstantFunction(
8902                function, 1, TailCallMode::kDisallow, TailCallMode::kDisallow));
8903            if (!ast_context()->IsEffect()) Push(result);
8904          }
8905          if_inline.End();
8906        }
8907        if_lengthiszero.End();
8908      }
8909      result = ast_context()->IsEffect() ? graph()->GetConstant0() : Top();
8910      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
8911      if (!ast_context()->IsEffect()) Drop(1);
8912      ast_context()->ReturnValue(result);
8913      return true;
8914    }
8915    case kArrayIndexOf:
8916    case kArrayLastIndexOf: {
8917      if (receiver_map.is_null()) return false;
8918      if (receiver_map->instance_type() != JS_ARRAY_TYPE) return false;
8919      if (!receiver_map->prototype()->IsJSObject()) return false;
8920      ElementsKind kind = receiver_map->elements_kind();
8921      if (!IsFastElementsKind(kind)) return false;
8922      if (argument_count != 2) return false;
8923      if (!receiver_map->is_extensible()) return false;
8924
8925      // If there may be elements accessors in the prototype chain, the fast
8926      // inlined version can't be used.
8927      if (receiver_map->DictionaryElementsInPrototypeChainOnly()) return false;
8928
8929      // If there currently can be no elements accessors on the prototype chain,
8930      // it doesn't mean that there won't be any later. Install a full prototype
8931      // chain check to trap element accessors being installed on the prototype
8932      // chain, which would cause elements to go to dictionary mode and result
8933      // in a map change.
8934      BuildCheckPrototypeMaps(
8935          handle(JSObject::cast(receiver_map->prototype()), isolate()),
8936          Handle<JSObject>::null());
8937
8938      HValue* search_element = Pop();
8939      HValue* receiver = Pop();
8940      Drop(1);  // Drop function.
8941
8942      ArrayIndexOfMode mode = (id == kArrayIndexOf)
8943          ? kFirstIndexOf : kLastIndexOf;
8944      HValue* index = BuildArrayIndexOf(receiver, search_element, kind, mode);
8945
8946      if (!ast_context()->IsEffect()) Push(index);
8947      Add<HSimulate>(ast_id, REMOVABLE_SIMULATE);
8948      if (!ast_context()->IsEffect()) Drop(1);
8949      ast_context()->ReturnValue(index);
8950      return true;
8951    }
8952    default:
8953      // Not yet supported for inlining.
8954      break;
8955  }
8956  return false;
8957}
8958
8959
8960bool HOptimizedGraphBuilder::TryInlineApiFunctionCall(Call* expr,
8961                                                      HValue* receiver) {
8962  if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
8963  Handle<JSFunction> function = expr->target();
8964  int argc = expr->arguments()->length();
8965  SmallMapList receiver_maps;
8966  return TryInlineApiCall(function, receiver, &receiver_maps, argc, expr->id(),
8967                          kCallApiFunction, expr->tail_call_mode());
8968}
8969
8970
8971bool HOptimizedGraphBuilder::TryInlineApiMethodCall(
8972    Call* expr,
8973    HValue* receiver,
8974    SmallMapList* receiver_maps) {
8975  if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
8976  Handle<JSFunction> function = expr->target();
8977  int argc = expr->arguments()->length();
8978  return TryInlineApiCall(function, receiver, receiver_maps, argc, expr->id(),
8979                          kCallApiMethod, expr->tail_call_mode());
8980}
8981
8982bool HOptimizedGraphBuilder::TryInlineApiGetter(Handle<Object> function,
8983                                                Handle<Map> receiver_map,
8984                                                BailoutId ast_id) {
8985  if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
8986  SmallMapList receiver_maps(1, zone());
8987  receiver_maps.Add(receiver_map, zone());
8988  return TryInlineApiCall(function,
8989                          NULL,  // Receiver is on expression stack.
8990                          &receiver_maps, 0, ast_id, kCallApiGetter,
8991                          TailCallMode::kDisallow);
8992}
8993
8994bool HOptimizedGraphBuilder::TryInlineApiSetter(Handle<Object> function,
8995                                                Handle<Map> receiver_map,
8996                                                BailoutId ast_id) {
8997  SmallMapList receiver_maps(1, zone());
8998  receiver_maps.Add(receiver_map, zone());
8999  return TryInlineApiCall(function,
9000                          NULL,  // Receiver is on expression stack.
9001                          &receiver_maps, 1, ast_id, kCallApiSetter,
9002                          TailCallMode::kDisallow);
9003}
9004
9005bool HOptimizedGraphBuilder::TryInlineApiCall(
9006    Handle<Object> function, HValue* receiver, SmallMapList* receiver_maps,
9007    int argc, BailoutId ast_id, ApiCallType call_type,
9008    TailCallMode syntactic_tail_call_mode) {
9009  if (V8_UNLIKELY(FLAG_runtime_stats)) return false;
9010  if (function->IsJSFunction() &&
9011      Handle<JSFunction>::cast(function)->context()->native_context() !=
9012          top_info()->closure()->context()->native_context()) {
9013    return false;
9014  }
9015  if (argc > CallApiCallbackStub::kArgMax) {
9016    return false;
9017  }
9018
9019  CallOptimization optimization(function);
9020  if (!optimization.is_simple_api_call()) return false;
9021  Handle<Map> holder_map;
9022  for (int i = 0; i < receiver_maps->length(); ++i) {
9023    auto map = receiver_maps->at(i);
9024    // Don't inline calls to receivers requiring accesschecks.
9025    if (map->is_access_check_needed()) return false;
9026  }
9027  if (call_type == kCallApiFunction) {
9028    // Cannot embed a direct reference to the global proxy map
9029    // as it maybe dropped on deserialization.
9030    CHECK(!isolate()->serializer_enabled());
9031    DCHECK(function->IsJSFunction());
9032    DCHECK_EQ(0, receiver_maps->length());
9033    receiver_maps->Add(
9034        handle(Handle<JSFunction>::cast(function)->global_proxy()->map()),
9035        zone());
9036  }
9037  CallOptimization::HolderLookup holder_lookup =
9038      CallOptimization::kHolderNotFound;
9039  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
9040      receiver_maps->first(), &holder_lookup);
9041  if (holder_lookup == CallOptimization::kHolderNotFound) return false;
9042
9043  if (FLAG_trace_inlining) {
9044    PrintF("Inlining api function ");
9045    function->ShortPrint();
9046    PrintF("\n");
9047  }
9048
9049  bool is_function = false;
9050  bool is_store = false;
9051  switch (call_type) {
9052    case kCallApiFunction:
9053    case kCallApiMethod:
9054      // Need to check that none of the receiver maps could have changed.
9055      Add<HCheckMaps>(receiver, receiver_maps);
9056      // Need to ensure the chain between receiver and api_holder is intact.
9057      if (holder_lookup == CallOptimization::kHolderFound) {
9058        AddCheckPrototypeMaps(api_holder, receiver_maps->first());
9059      } else {
9060        DCHECK_EQ(holder_lookup, CallOptimization::kHolderIsReceiver);
9061      }
9062      // Includes receiver.
9063      PushArgumentsFromEnvironment(argc + 1);
9064      is_function = true;
9065      break;
9066    case kCallApiGetter:
9067      // Receiver and prototype chain cannot have changed.
9068      DCHECK_EQ(0, argc);
9069      DCHECK_NULL(receiver);
9070      // Receiver is on expression stack.
9071      receiver = Pop();
9072      Add<HPushArguments>(receiver);
9073      break;
9074    case kCallApiSetter:
9075      {
9076        is_store = true;
9077        // Receiver and prototype chain cannot have changed.
9078        DCHECK_EQ(1, argc);
9079        DCHECK_NULL(receiver);
9080        // Receiver and value are on expression stack.
9081        HValue* value = Pop();
9082        receiver = Pop();
9083        Add<HPushArguments>(receiver, value);
9084        break;
9085     }
9086  }
9087
9088  HValue* holder = NULL;
9089  switch (holder_lookup) {
9090    case CallOptimization::kHolderFound:
9091      holder = Add<HConstant>(api_holder);
9092      break;
9093    case CallOptimization::kHolderIsReceiver:
9094      holder = receiver;
9095      break;
9096    case CallOptimization::kHolderNotFound:
9097      UNREACHABLE();
9098      break;
9099  }
9100  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
9101  Handle<Object> call_data_obj(api_call_info->data(), isolate());
9102  bool call_data_undefined = call_data_obj->IsUndefined(isolate());
9103  HValue* call_data = Add<HConstant>(call_data_obj);
9104  ApiFunction fun(v8::ToCData<Address>(api_call_info->callback()));
9105  ExternalReference ref = ExternalReference(&fun,
9106                                            ExternalReference::DIRECT_API_CALL,
9107                                            isolate());
9108  HValue* api_function_address = Add<HConstant>(ExternalReference(ref));
9109
9110  HValue* op_vals[] = {Add<HConstant>(function), call_data, holder,
9111                       api_function_address};
9112
9113  HInstruction* call = nullptr;
9114  CHECK(argc <= CallApiCallbackStub::kArgMax);
9115  if (!is_function) {
9116    CallApiCallbackStub stub(isolate(), is_store, call_data_undefined,
9117                             !optimization.is_constant_call());
9118    Handle<Code> code = stub.GetCode();
9119    HConstant* code_value = Add<HConstant>(code);
9120    call = New<HCallWithDescriptor>(
9121        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
9122        Vector<HValue*>(op_vals, arraysize(op_vals)), syntactic_tail_call_mode);
9123  } else {
9124    CallApiCallbackStub stub(isolate(), argc, call_data_undefined, false);
9125    Handle<Code> code = stub.GetCode();
9126    HConstant* code_value = Add<HConstant>(code);
9127    call = New<HCallWithDescriptor>(
9128        code_value, argc + 1, stub.GetCallInterfaceDescriptor(),
9129        Vector<HValue*>(op_vals, arraysize(op_vals)), syntactic_tail_call_mode);
9130    Drop(1);  // Drop function.
9131  }
9132
9133  ast_context()->ReturnInstruction(call, ast_id);
9134  return true;
9135}
9136
9137
9138void HOptimizedGraphBuilder::HandleIndirectCall(Call* expr, HValue* function,
9139                                                int arguments_count) {
9140  Handle<JSFunction> known_function;
9141  int args_count_no_receiver = arguments_count - 1;
9142  if (function->IsConstant() &&
9143      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9144    known_function =
9145        Handle<JSFunction>::cast(HConstant::cast(function)->handle(isolate()));
9146    if (TryInlineBuiltinMethodCall(known_function, Handle<Map>(), expr->id(),
9147                                   args_count_no_receiver)) {
9148      if (FLAG_trace_inlining) {
9149        PrintF("Inlining builtin ");
9150        known_function->ShortPrint();
9151        PrintF("\n");
9152      }
9153      return;
9154    }
9155
9156    if (TryInlineIndirectCall(known_function, expr, args_count_no_receiver)) {
9157      return;
9158    }
9159  }
9160
9161  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
9162  TailCallMode tail_call_mode =
9163      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
9164
9165  PushArgumentsFromEnvironment(arguments_count);
9166  HInvokeFunction* call =
9167      New<HInvokeFunction>(function, known_function, arguments_count,
9168                           syntactic_tail_call_mode, tail_call_mode);
9169  Drop(1);  // Function
9170  ast_context()->ReturnInstruction(call, expr->id());
9171}
9172
9173
9174bool HOptimizedGraphBuilder::TryIndirectCall(Call* expr) {
9175  DCHECK(expr->expression()->IsProperty());
9176
9177  if (!expr->IsMonomorphic()) {
9178    return false;
9179  }
9180  Handle<Map> function_map = expr->GetReceiverTypes()->first();
9181  if (function_map->instance_type() != JS_FUNCTION_TYPE ||
9182      !expr->target()->shared()->HasBuiltinFunctionId()) {
9183    return false;
9184  }
9185
9186  switch (expr->target()->shared()->builtin_function_id()) {
9187    case kFunctionCall: {
9188      if (expr->arguments()->length() == 0) return false;
9189      BuildFunctionCall(expr);
9190      return true;
9191    }
9192    case kFunctionApply: {
9193      // For .apply, only the pattern f.apply(receiver, arguments)
9194      // is supported.
9195      if (!CanBeFunctionApplyArguments(expr)) return false;
9196
9197      BuildFunctionApply(expr);
9198      return true;
9199    }
9200    default: { return false; }
9201  }
9202  UNREACHABLE();
9203}
9204
9205
9206// f.apply(...)
9207void HOptimizedGraphBuilder::BuildFunctionApply(Call* expr) {
9208  ZoneList<Expression*>* args = expr->arguments();
9209  CHECK_ALIVE(VisitForValue(args->at(0)));
9210  HValue* receiver = Pop();  // receiver
9211  HValue* function = Pop();  // f
9212  Drop(1);  // apply
9213
9214  // Make sure the arguments object is live.
9215  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
9216  LookupAndMakeLive(arg_two->var());
9217
9218  Handle<Map> function_map = expr->GetReceiverTypes()->first();
9219  HValue* checked_function = AddCheckMap(function, function_map);
9220
9221  if (function_state()->outer() == NULL) {
9222    TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
9223    TailCallMode tail_call_mode =
9224        function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
9225
9226    HInstruction* elements = Add<HArgumentsElements>(false);
9227    HInstruction* length = Add<HArgumentsLength>(elements);
9228    HValue* wrapped_receiver = BuildWrapReceiver(receiver, checked_function);
9229    HInstruction* result = New<HApplyArguments>(
9230        function, wrapped_receiver, length, elements, tail_call_mode);
9231    ast_context()->ReturnInstruction(result, expr->id());
9232  } else {
9233    // We are inside inlined function and we know exactly what is inside
9234    // arguments object. But we need to be able to materialize at deopt.
9235    DCHECK_EQ(environment()->arguments_environment()->parameter_count(),
9236              function_state()->entry()->arguments_object()->arguments_count());
9237    HArgumentsObject* args = function_state()->entry()->arguments_object();
9238    const ZoneList<HValue*>* arguments_values = args->arguments_values();
9239    int arguments_count = arguments_values->length();
9240    Push(function);
9241    Push(BuildWrapReceiver(receiver, checked_function));
9242    for (int i = 1; i < arguments_count; i++) {
9243      Push(arguments_values->at(i));
9244    }
9245    HandleIndirectCall(expr, function, arguments_count);
9246  }
9247}
9248
9249
9250// f.call(...)
9251void HOptimizedGraphBuilder::BuildFunctionCall(Call* expr) {
9252  HValue* function = Top();  // f
9253  Handle<Map> function_map = expr->GetReceiverTypes()->first();
9254  HValue* checked_function = AddCheckMap(function, function_map);
9255
9256  // f and call are on the stack in the unoptimized code
9257  // during evaluation of the arguments.
9258  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9259
9260  int args_length = expr->arguments()->length();
9261  int receiver_index = args_length - 1;
9262  // Patch the receiver.
9263  HValue* receiver = BuildWrapReceiver(
9264      environment()->ExpressionStackAt(receiver_index), checked_function);
9265  environment()->SetExpressionStackAt(receiver_index, receiver);
9266
9267  // Call must not be on the stack from now on.
9268  int call_index = args_length + 1;
9269  environment()->RemoveExpressionStackAt(call_index);
9270
9271  HandleIndirectCall(expr, function, args_length);
9272}
9273
9274
9275HValue* HOptimizedGraphBuilder::ImplicitReceiverFor(HValue* function,
9276                                                    Handle<JSFunction> target) {
9277  SharedFunctionInfo* shared = target->shared();
9278  if (is_sloppy(shared->language_mode()) && !shared->native()) {
9279    // Cannot embed a direct reference to the global proxy
9280    // as is it dropped on deserialization.
9281    CHECK(!isolate()->serializer_enabled());
9282    Handle<JSObject> global_proxy(target->context()->global_proxy());
9283    return Add<HConstant>(global_proxy);
9284  }
9285  return graph()->GetConstantUndefined();
9286}
9287
9288
9289HValue* HOptimizedGraphBuilder::BuildArrayIndexOf(HValue* receiver,
9290                                                  HValue* search_element,
9291                                                  ElementsKind kind,
9292                                                  ArrayIndexOfMode mode) {
9293  DCHECK(IsFastElementsKind(kind));
9294
9295  NoObservableSideEffectsScope no_effects(this);
9296
9297  HValue* elements = AddLoadElements(receiver);
9298  HValue* length = AddLoadArrayLength(receiver, kind);
9299
9300  HValue* initial;
9301  HValue* terminating;
9302  Token::Value token;
9303  LoopBuilder::Direction direction;
9304  if (mode == kFirstIndexOf) {
9305    initial = graph()->GetConstant0();
9306    terminating = length;
9307    token = Token::LT;
9308    direction = LoopBuilder::kPostIncrement;
9309  } else {
9310    DCHECK_EQ(kLastIndexOf, mode);
9311    initial = length;
9312    terminating = graph()->GetConstant0();
9313    token = Token::GT;
9314    direction = LoopBuilder::kPreDecrement;
9315  }
9316
9317  Push(graph()->GetConstantMinus1());
9318  if (IsFastDoubleElementsKind(kind) || IsFastSmiElementsKind(kind)) {
9319    // Make sure that we can actually compare numbers correctly below, see
9320    // https://code.google.com/p/chromium/issues/detail?id=407946 for details.
9321    search_element = AddUncasted<HForceRepresentation>(
9322        search_element, IsFastSmiElementsKind(kind) ? Representation::Smi()
9323                                                    : Representation::Double());
9324
9325    LoopBuilder loop(this, context(), direction);
9326    {
9327      HValue* index = loop.BeginBody(initial, terminating, token);
9328      HValue* element = AddUncasted<HLoadKeyed>(
9329          elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9330      IfBuilder if_issame(this);
9331      if_issame.If<HCompareNumericAndBranch>(element, search_element,
9332                                             Token::EQ_STRICT);
9333      if_issame.Then();
9334      {
9335        Drop(1);
9336        Push(index);
9337        loop.Break();
9338      }
9339      if_issame.End();
9340    }
9341    loop.EndBody();
9342  } else {
9343    IfBuilder if_isstring(this);
9344    if_isstring.If<HIsStringAndBranch>(search_element);
9345    if_isstring.Then();
9346    {
9347      LoopBuilder loop(this, context(), direction);
9348      {
9349        HValue* index = loop.BeginBody(initial, terminating, token);
9350        HValue* element = AddUncasted<HLoadKeyed>(
9351            elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9352        IfBuilder if_issame(this);
9353        if_issame.If<HIsStringAndBranch>(element);
9354        if_issame.AndIf<HStringCompareAndBranch>(
9355            element, search_element, Token::EQ_STRICT);
9356        if_issame.Then();
9357        {
9358          Drop(1);
9359          Push(index);
9360          loop.Break();
9361        }
9362        if_issame.End();
9363      }
9364      loop.EndBody();
9365    }
9366    if_isstring.Else();
9367    {
9368      IfBuilder if_isnumber(this);
9369      if_isnumber.If<HIsSmiAndBranch>(search_element);
9370      if_isnumber.OrIf<HCompareMap>(
9371          search_element, isolate()->factory()->heap_number_map());
9372      if_isnumber.Then();
9373      {
9374        HValue* search_number =
9375            AddUncasted<HForceRepresentation>(search_element,
9376                                              Representation::Double());
9377        LoopBuilder loop(this, context(), direction);
9378        {
9379          HValue* index = loop.BeginBody(initial, terminating, token);
9380          HValue* element = AddUncasted<HLoadKeyed>(
9381              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9382
9383          IfBuilder if_element_isnumber(this);
9384          if_element_isnumber.If<HIsSmiAndBranch>(element);
9385          if_element_isnumber.OrIf<HCompareMap>(
9386              element, isolate()->factory()->heap_number_map());
9387          if_element_isnumber.Then();
9388          {
9389            HValue* number =
9390                AddUncasted<HForceRepresentation>(element,
9391                                                  Representation::Double());
9392            IfBuilder if_issame(this);
9393            if_issame.If<HCompareNumericAndBranch>(
9394                number, search_number, Token::EQ_STRICT);
9395            if_issame.Then();
9396            {
9397              Drop(1);
9398              Push(index);
9399              loop.Break();
9400            }
9401            if_issame.End();
9402          }
9403          if_element_isnumber.End();
9404        }
9405        loop.EndBody();
9406      }
9407      if_isnumber.Else();
9408      {
9409        LoopBuilder loop(this, context(), direction);
9410        {
9411          HValue* index = loop.BeginBody(initial, terminating, token);
9412          HValue* element = AddUncasted<HLoadKeyed>(
9413              elements, index, nullptr, nullptr, kind, ALLOW_RETURN_HOLE);
9414          IfBuilder if_issame(this);
9415          if_issame.If<HCompareObjectEqAndBranch>(
9416              element, search_element);
9417          if_issame.Then();
9418          {
9419            Drop(1);
9420            Push(index);
9421            loop.Break();
9422          }
9423          if_issame.End();
9424        }
9425        loop.EndBody();
9426      }
9427      if_isnumber.End();
9428    }
9429    if_isstring.End();
9430  }
9431
9432  return Pop();
9433}
9434
9435template <class T>
9436bool HOptimizedGraphBuilder::TryHandleArrayCall(T* expr, HValue* function) {
9437  if (!array_function().is_identical_to(expr->target())) {
9438    return false;
9439  }
9440
9441  Handle<AllocationSite> site = expr->allocation_site();
9442  if (site.is_null()) return false;
9443
9444  Add<HCheckValue>(function, array_function());
9445
9446  int arguments_count = expr->arguments()->length();
9447  if (TryInlineArrayCall(expr, arguments_count, site)) return true;
9448
9449  HInstruction* call = PreProcessCall(New<HCallNewArray>(
9450      function, arguments_count + 1, site->GetElementsKind(), site));
9451  if (expr->IsCall()) Drop(1);
9452  ast_context()->ReturnInstruction(call, expr->id());
9453
9454  return true;
9455}
9456
9457
9458bool HOptimizedGraphBuilder::CanBeFunctionApplyArguments(Call* expr) {
9459  ZoneList<Expression*>* args = expr->arguments();
9460  if (args->length() != 2) return false;
9461  VariableProxy* arg_two = args->at(1)->AsVariableProxy();
9462  if (arg_two == NULL || !arg_two->var()->IsStackAllocated()) return false;
9463  HValue* arg_two_value = environment()->Lookup(arg_two->var());
9464  if (!arg_two_value->CheckFlag(HValue::kIsArguments)) return false;
9465  DCHECK_NOT_NULL(current_info()->scope()->arguments());
9466  return true;
9467}
9468
9469
9470void HOptimizedGraphBuilder::VisitCall(Call* expr) {
9471  DCHECK(!HasStackOverflow());
9472  DCHECK(current_block() != NULL);
9473  DCHECK(current_block()->HasPredecessor());
9474  if (!is_tracking_positions()) SetSourcePosition(expr->position());
9475  Expression* callee = expr->expression();
9476  int argument_count = expr->arguments()->length() + 1;  // Plus receiver.
9477  HInstruction* call = NULL;
9478
9479  TailCallMode syntactic_tail_call_mode = expr->tail_call_mode();
9480  TailCallMode tail_call_mode =
9481      function_state()->ComputeTailCallMode(syntactic_tail_call_mode);
9482
9483  Property* prop = callee->AsProperty();
9484  if (prop != NULL) {
9485    CHECK_ALIVE(VisitForValue(prop->obj()));
9486    HValue* receiver = Top();
9487
9488    SmallMapList* maps;
9489    ComputeReceiverTypes(expr, receiver, &maps, this);
9490
9491    if (prop->key()->IsPropertyName() && maps->length() > 0) {
9492      Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
9493      PropertyAccessInfo info(this, LOAD, maps->first(), name);
9494      if (!info.CanAccessAsMonomorphic(maps)) {
9495        HandlePolymorphicCallNamed(expr, receiver, maps, name);
9496        return;
9497      }
9498    }
9499    HValue* key = NULL;
9500    if (!prop->key()->IsPropertyName()) {
9501      CHECK_ALIVE(VisitForValue(prop->key()));
9502      key = Pop();
9503    }
9504
9505    CHECK_ALIVE(PushLoad(prop, receiver, key));
9506    HValue* function = Pop();
9507
9508    if (function->IsConstant() &&
9509        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9510      // Push the function under the receiver.
9511      environment()->SetExpressionStackAt(0, function);
9512      Push(receiver);
9513
9514      Handle<JSFunction> known_function = Handle<JSFunction>::cast(
9515          HConstant::cast(function)->handle(isolate()));
9516      expr->set_target(known_function);
9517
9518      if (TryIndirectCall(expr)) return;
9519      CHECK_ALIVE(VisitExpressions(expr->arguments()));
9520
9521      Handle<Map> map = maps->length() == 1 ? maps->first() : Handle<Map>();
9522      if (TryInlineBuiltinMethodCall(known_function, map, expr->id(),
9523                                     expr->arguments()->length())) {
9524        if (FLAG_trace_inlining) {
9525          PrintF("Inlining builtin ");
9526          known_function->ShortPrint();
9527          PrintF("\n");
9528        }
9529        return;
9530      }
9531      if (TryInlineApiMethodCall(expr, receiver, maps)) return;
9532
9533      // Wrap the receiver if necessary.
9534      if (NeedsWrapping(maps->first(), known_function)) {
9535        // Since HWrapReceiver currently cannot actually wrap numbers and
9536        // strings, use the regular call builtin for method calls to wrap
9537        // the receiver.
9538        // TODO(verwaest): Support creation of value wrappers directly in
9539        // HWrapReceiver.
9540        call = NewCallFunction(
9541            function, argument_count, syntactic_tail_call_mode,
9542            ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode);
9543      } else if (TryInlineCall(expr)) {
9544        return;
9545      } else {
9546        call =
9547            NewCallConstantFunction(known_function, argument_count,
9548                                    syntactic_tail_call_mode, tail_call_mode);
9549      }
9550
9551    } else {
9552      ArgumentsAllowedFlag arguments_flag = ARGUMENTS_NOT_ALLOWED;
9553      if (CanBeFunctionApplyArguments(expr) && expr->is_uninitialized()) {
9554        // We have to use EAGER deoptimization here because Deoptimizer::SOFT
9555        // gets ignored by the always-opt flag, which leads to incorrect code.
9556        Add<HDeoptimize>(
9557            DeoptimizeReason::kInsufficientTypeFeedbackForCallWithArguments,
9558            Deoptimizer::EAGER);
9559        arguments_flag = ARGUMENTS_FAKED;
9560      }
9561
9562      // Push the function under the receiver.
9563      environment()->SetExpressionStackAt(0, function);
9564      Push(receiver);
9565
9566      CHECK_ALIVE(VisitExpressions(expr->arguments(), arguments_flag));
9567      call = NewCallFunction(function, argument_count, syntactic_tail_call_mode,
9568                             ConvertReceiverMode::kNotNullOrUndefined,
9569                             tail_call_mode);
9570    }
9571    PushArgumentsFromEnvironment(argument_count);
9572
9573  } else {
9574    if (expr->is_possibly_eval()) {
9575      return Bailout(kPossibleDirectCallToEval);
9576    }
9577
9578    // The function is on the stack in the unoptimized code during
9579    // evaluation of the arguments.
9580    CHECK_ALIVE(VisitForValue(expr->expression()));
9581    HValue* function = Top();
9582    if (function->IsConstant() &&
9583        HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9584      Handle<Object> constant = HConstant::cast(function)->handle(isolate());
9585      Handle<JSFunction> target = Handle<JSFunction>::cast(constant);
9586      expr->SetKnownGlobalTarget(target);
9587    }
9588
9589    // Placeholder for the receiver.
9590    Push(graph()->GetConstantUndefined());
9591    CHECK_ALIVE(VisitExpressions(expr->arguments()));
9592
9593    if (expr->IsMonomorphic() &&
9594        !IsClassConstructor(expr->target()->shared()->kind())) {
9595      Add<HCheckValue>(function, expr->target());
9596
9597      // Patch the global object on the stack by the expected receiver.
9598      HValue* receiver = ImplicitReceiverFor(function, expr->target());
9599      const int receiver_index = argument_count - 1;
9600      environment()->SetExpressionStackAt(receiver_index, receiver);
9601
9602      if (TryInlineBuiltinFunctionCall(expr)) {
9603        if (FLAG_trace_inlining) {
9604          PrintF("Inlining builtin ");
9605          expr->target()->ShortPrint();
9606          PrintF("\n");
9607        }
9608        return;
9609      }
9610      if (TryInlineApiFunctionCall(expr, receiver)) return;
9611      if (TryHandleArrayCall(expr, function)) return;
9612      if (TryInlineCall(expr)) return;
9613
9614      PushArgumentsFromEnvironment(argument_count);
9615      call = NewCallConstantFunction(expr->target(), argument_count,
9616                                     syntactic_tail_call_mode, tail_call_mode);
9617    } else {
9618      PushArgumentsFromEnvironment(argument_count);
9619      if (expr->is_uninitialized()) {
9620        // We've never seen this call before, so let's have Crankshaft learn
9621        // through the type vector.
9622        call = NewCallFunctionViaIC(function, argument_count,
9623                                    syntactic_tail_call_mode,
9624                                    ConvertReceiverMode::kNullOrUndefined,
9625                                    tail_call_mode, expr->CallFeedbackICSlot());
9626      } else {
9627        call = NewCallFunction(
9628            function, argument_count, syntactic_tail_call_mode,
9629            ConvertReceiverMode::kNullOrUndefined, tail_call_mode);
9630      }
9631    }
9632  }
9633
9634  Drop(1);  // Drop the function.
9635  return ast_context()->ReturnInstruction(call, expr->id());
9636}
9637
9638bool HOptimizedGraphBuilder::TryInlineArrayCall(Expression* expression,
9639                                                int argument_count,
9640                                                Handle<AllocationSite> site) {
9641  Handle<JSFunction> caller = current_info()->closure();
9642  Handle<JSFunction> target = array_function();
9643
9644  if (!site->CanInlineCall()) {
9645    TraceInline(target, caller, "AllocationSite requested no inlining.");
9646    return false;
9647  }
9648
9649  if (argument_count > 1) {
9650    TraceInline(target, caller, "Too many arguments to inline.");
9651    return false;
9652  }
9653
9654  int array_length = 0;
9655  // Do not inline if the constant length argument is not a smi or outside the
9656  // valid range for unrolled loop initialization.
9657  if (argument_count == 1) {
9658    HValue* argument = Top();
9659    if (!argument->IsConstant()) {
9660      TraceInline(target, caller,
9661                  "Dont inline [new] Array(n) where n isn't constant.");
9662      return false;
9663    }
9664
9665    HConstant* constant_argument = HConstant::cast(argument);
9666    if (!constant_argument->HasSmiValue()) {
9667      TraceInline(target, caller,
9668                  "Constant length outside of valid inlining range.");
9669      return false;
9670    }
9671    array_length = constant_argument->Integer32Value();
9672    if (array_length < 0 || array_length > kElementLoopUnrollThreshold) {
9673      TraceInline(target, caller,
9674                  "Constant length outside of valid inlining range.");
9675      return false;
9676    }
9677  }
9678
9679  TraceInline(target, caller, NULL);
9680
9681  NoObservableSideEffectsScope no_effects(this);
9682
9683  // Register on the site for deoptimization if the transition feedback changes.
9684  top_info()->dependencies()->AssumeTransitionStable(site);
9685
9686  // Build the array.
9687  ElementsKind kind = site->GetElementsKind();
9688  HValue* capacity;
9689  HValue* length;
9690  if (array_length == 0) {
9691    STATIC_ASSERT(0 < JSArray::kPreallocatedArrayElements);
9692    const int initial_capacity = JSArray::kPreallocatedArrayElements;
9693    capacity = Add<HConstant>(initial_capacity);
9694    length = graph()->GetConstant0();
9695  } else {
9696    length = Top();
9697    capacity = length;
9698    kind = GetHoleyElementsKind(kind);
9699  }
9700
9701  // These HForceRepresentations are because we store these as fields in the
9702  // objects we construct, and an int32-to-smi HChange could deopt. Accept
9703  // the deopt possibility now, before allocation occurs.
9704  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9705  capacity = AddUncasted<HForceRepresentation>(capacity, Representation::Smi());
9706
9707  // Generate size calculation code here in order to make it dominate
9708  // the JSArray allocation.
9709  HValue* elements_size = BuildCalculateElementsSize(kind, capacity);
9710
9711  // Bail out for large objects.
9712  HValue* max_size = Add<HConstant>(kMaxRegularHeapObjectSize);
9713  Add<HBoundsCheck>(elements_size, max_size);
9714
9715  // Allocate (dealing with failure appropriately).
9716  AllocationSiteMode mode = DONT_TRACK_ALLOCATION_SITE;
9717  HAllocate* new_object = AllocateJSArrayObject(mode);
9718
9719  // Fill in the fields: map, properties, length.
9720  Handle<Map> map_constant(isolate()->get_initial_js_array_map(kind));
9721  HValue* map = Add<HConstant>(map_constant);
9722
9723  BuildJSArrayHeader(new_object, map,
9724                     nullptr,  // set elements to empty fixed array
9725                     mode, kind, nullptr, length);
9726
9727  // Allocate and initialize the elements.
9728  HAllocate* elements = BuildAllocateElements(kind, elements_size);
9729  BuildInitializeElementsHeader(elements, kind, capacity);
9730  BuildFillElementsWithHole(elements, kind, graph()->GetConstant0(), capacity);
9731
9732  // Set the elements.
9733  Add<HStoreNamedField>(new_object, HObjectAccess::ForElementsPointer(),
9734                        elements);
9735
9736  int args_to_drop = argument_count + (expression->IsCall() ? 2 : 1);
9737  Drop(args_to_drop);
9738  ast_context()->ReturnValue(new_object);
9739  return true;
9740}
9741
9742
9743// Checks whether allocation using the given constructor can be inlined.
9744static bool IsAllocationInlineable(Handle<JSFunction> constructor) {
9745  return constructor->has_initial_map() &&
9746         !IsDerivedConstructor(constructor->shared()->kind()) &&
9747         constructor->initial_map()->instance_type() == JS_OBJECT_TYPE &&
9748         constructor->initial_map()->instance_size() <
9749             HAllocate::kMaxInlineSize;
9750}
9751
9752void HOptimizedGraphBuilder::VisitCallNew(CallNew* expr) {
9753  DCHECK(!HasStackOverflow());
9754  DCHECK(current_block() != NULL);
9755  DCHECK(current_block()->HasPredecessor());
9756  if (!is_tracking_positions()) SetSourcePosition(expr->position());
9757  int argument_count = expr->arguments()->length() + 1;  // Plus constructor.
9758  Factory* factory = isolate()->factory();
9759
9760  // The constructor function is on the stack in the unoptimized code
9761  // during evaluation of the arguments.
9762  CHECK_ALIVE(VisitForValue(expr->expression()));
9763  HValue* function = Top();
9764  CHECK_ALIVE(VisitExpressions(expr->arguments()));
9765
9766  if (function->IsConstant() &&
9767      HConstant::cast(function)->handle(isolate())->IsJSFunction()) {
9768    Handle<Object> constant = HConstant::cast(function)->handle(isolate());
9769    expr->SetKnownGlobalTarget(Handle<JSFunction>::cast(constant));
9770  }
9771
9772  if (FLAG_inline_construct &&
9773      expr->IsMonomorphic() &&
9774      IsAllocationInlineable(expr->target())) {
9775    Handle<JSFunction> constructor = expr->target();
9776    DCHECK(
9777        constructor->shared()->construct_stub() ==
9778            isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric) ||
9779        constructor->shared()->construct_stub() ==
9780            isolate()->builtins()->builtin(Builtins::kJSConstructStubApi));
9781    HValue* check = Add<HCheckValue>(function, constructor);
9782
9783    // Force completion of inobject slack tracking before generating
9784    // allocation code to finalize instance size.
9785    constructor->CompleteInobjectSlackTrackingIfActive();
9786
9787    // Calculate instance size from initial map of constructor.
9788    DCHECK(constructor->has_initial_map());
9789    Handle<Map> initial_map(constructor->initial_map());
9790    int instance_size = initial_map->instance_size();
9791
9792    // Allocate an instance of the implicit receiver object.
9793    HValue* size_in_bytes = Add<HConstant>(instance_size);
9794    HAllocationMode allocation_mode;
9795    HAllocate* receiver = BuildAllocate(
9796        size_in_bytes, HType::JSObject(), JS_OBJECT_TYPE, allocation_mode);
9797    receiver->set_known_initial_map(initial_map);
9798
9799    // Initialize map and fields of the newly allocated object.
9800    { NoObservableSideEffectsScope no_effects(this);
9801      DCHECK(initial_map->instance_type() == JS_OBJECT_TYPE);
9802      Add<HStoreNamedField>(receiver,
9803          HObjectAccess::ForMapAndOffset(initial_map, JSObject::kMapOffset),
9804          Add<HConstant>(initial_map));
9805      HValue* empty_fixed_array = Add<HConstant>(factory->empty_fixed_array());
9806      Add<HStoreNamedField>(receiver,
9807          HObjectAccess::ForMapAndOffset(initial_map,
9808                                         JSObject::kPropertiesOffset),
9809          empty_fixed_array);
9810      Add<HStoreNamedField>(receiver,
9811          HObjectAccess::ForMapAndOffset(initial_map,
9812                                         JSObject::kElementsOffset),
9813          empty_fixed_array);
9814      BuildInitializeInobjectProperties(receiver, initial_map);
9815    }
9816
9817    // Replace the constructor function with a newly allocated receiver using
9818    // the index of the receiver from the top of the expression stack.
9819    const int receiver_index = argument_count - 1;
9820    DCHECK(environment()->ExpressionStackAt(receiver_index) == function);
9821    environment()->SetExpressionStackAt(receiver_index, receiver);
9822
9823    if (TryInlineConstruct(expr, receiver)) {
9824      // Inlining worked, add a dependency on the initial map to make sure that
9825      // this code is deoptimized whenever the initial map of the constructor
9826      // changes.
9827      top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
9828      return;
9829    }
9830
9831    // TODO(mstarzinger): For now we remove the previous HAllocate and all
9832    // corresponding instructions and instead add HPushArguments for the
9833    // arguments in case inlining failed.  What we actually should do is for
9834    // inlining to try to build a subgraph without mutating the parent graph.
9835    HInstruction* instr = current_block()->last();
9836    do {
9837      HInstruction* prev_instr = instr->previous();
9838      instr->DeleteAndReplaceWith(NULL);
9839      instr = prev_instr;
9840    } while (instr != check);
9841    environment()->SetExpressionStackAt(receiver_index, function);
9842  } else {
9843    // The constructor function is both an operand to the instruction and an
9844    // argument to the construct call.
9845    if (TryHandleArrayCall(expr, function)) return;
9846  }
9847
9848  HValue* arity = Add<HConstant>(argument_count - 1);
9849  HValue* op_vals[] = {function, function, arity};
9850  Callable callable = CodeFactory::Construct(isolate());
9851  HConstant* stub = Add<HConstant>(callable.code());
9852  PushArgumentsFromEnvironment(argument_count);
9853  HInstruction* construct = New<HCallWithDescriptor>(
9854      stub, argument_count, callable.descriptor(), ArrayVector(op_vals));
9855  return ast_context()->ReturnInstruction(construct, expr->id());
9856}
9857
9858
9859void HOptimizedGraphBuilder::BuildInitializeInobjectProperties(
9860    HValue* receiver, Handle<Map> initial_map) {
9861  if (initial_map->GetInObjectProperties() != 0) {
9862    HConstant* undefined = graph()->GetConstantUndefined();
9863    for (int i = 0; i < initial_map->GetInObjectProperties(); i++) {
9864      int property_offset = initial_map->GetInObjectPropertyOffset(i);
9865      Add<HStoreNamedField>(receiver, HObjectAccess::ForMapAndOffset(
9866                                          initial_map, property_offset),
9867                            undefined);
9868    }
9869  }
9870}
9871
9872
9873HValue* HGraphBuilder::BuildAllocateEmptyArrayBuffer(HValue* byte_length) {
9874  // We HForceRepresentation here to avoid allocations during an *-to-tagged
9875  // HChange that could cause GC while the array buffer object is not fully
9876  // initialized.
9877  HObjectAccess byte_length_access(HObjectAccess::ForJSArrayBufferByteLength());
9878  byte_length = AddUncasted<HForceRepresentation>(
9879      byte_length, byte_length_access.representation());
9880  HAllocate* result =
9881      BuildAllocate(Add<HConstant>(JSArrayBuffer::kSizeWithInternalFields),
9882                    HType::JSObject(), JS_ARRAY_BUFFER_TYPE, HAllocationMode());
9883
9884  HValue* native_context = BuildGetNativeContext();
9885  Add<HStoreNamedField>(
9886      result, HObjectAccess::ForMap(),
9887      Add<HLoadNamedField>(
9888          native_context, nullptr,
9889          HObjectAccess::ForContextSlot(Context::ARRAY_BUFFER_MAP_INDEX)));
9890
9891  HConstant* empty_fixed_array =
9892      Add<HConstant>(isolate()->factory()->empty_fixed_array());
9893  Add<HStoreNamedField>(
9894      result, HObjectAccess::ForJSArrayOffset(JSArray::kPropertiesOffset),
9895      empty_fixed_array);
9896  Add<HStoreNamedField>(
9897      result, HObjectAccess::ForJSArrayOffset(JSArray::kElementsOffset),
9898      empty_fixed_array);
9899  Add<HStoreNamedField>(
9900      result, HObjectAccess::ForJSArrayBufferBackingStore().WithRepresentation(
9901                  Representation::Smi()),
9902      graph()->GetConstant0());
9903  Add<HStoreNamedField>(result, byte_length_access, byte_length);
9904  Add<HStoreNamedField>(result, HObjectAccess::ForJSArrayBufferBitFieldSlot(),
9905                        graph()->GetConstant0());
9906  Add<HStoreNamedField>(
9907      result, HObjectAccess::ForJSArrayBufferBitField(),
9908      Add<HConstant>((1 << JSArrayBuffer::IsExternal::kShift) |
9909                     (1 << JSArrayBuffer::IsNeuterable::kShift)));
9910
9911  for (int field = 0; field < v8::ArrayBuffer::kInternalFieldCount; ++field) {
9912    Add<HStoreNamedField>(
9913        result,
9914        HObjectAccess::ForObservableJSObjectOffset(
9915            JSArrayBuffer::kSize + field * kPointerSize, Representation::Smi()),
9916        graph()->GetConstant0());
9917  }
9918
9919  return result;
9920}
9921
9922
9923template <class ViewClass>
9924void HGraphBuilder::BuildArrayBufferViewInitialization(
9925    HValue* obj,
9926    HValue* buffer,
9927    HValue* byte_offset,
9928    HValue* byte_length) {
9929
9930  for (int offset = ViewClass::kSize;
9931       offset < ViewClass::kSizeWithInternalFields;
9932       offset += kPointerSize) {
9933    Add<HStoreNamedField>(obj,
9934        HObjectAccess::ForObservableJSObjectOffset(offset),
9935        graph()->GetConstant0());
9936  }
9937
9938  Add<HStoreNamedField>(
9939      obj,
9940      HObjectAccess::ForJSArrayBufferViewByteOffset(),
9941      byte_offset);
9942  Add<HStoreNamedField>(
9943      obj,
9944      HObjectAccess::ForJSArrayBufferViewByteLength(),
9945      byte_length);
9946  Add<HStoreNamedField>(obj, HObjectAccess::ForJSArrayBufferViewBuffer(),
9947                        buffer);
9948}
9949
9950
9951HValue* HOptimizedGraphBuilder::BuildAllocateExternalElements(
9952    ExternalArrayType array_type,
9953    bool is_zero_byte_offset,
9954    HValue* buffer, HValue* byte_offset, HValue* length) {
9955  Handle<Map> external_array_map(
9956      isolate()->heap()->MapForFixedTypedArray(array_type));
9957
9958  // The HForceRepresentation is to prevent possible deopt on int-smi
9959  // conversion after allocation but before the new object fields are set.
9960  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
9961  HValue* elements = Add<HAllocate>(
9962      Add<HConstant>(FixedTypedArrayBase::kHeaderSize), HType::HeapObject(),
9963      NOT_TENURED, external_array_map->instance_type(),
9964      graph()->GetConstant0());
9965
9966  AddStoreMapConstant(elements, external_array_map);
9967  Add<HStoreNamedField>(elements,
9968      HObjectAccess::ForFixedArrayLength(), length);
9969
9970  HValue* backing_store = Add<HLoadNamedField>(
9971      buffer, nullptr, HObjectAccess::ForJSArrayBufferBackingStore());
9972
9973  HValue* typed_array_start;
9974  if (is_zero_byte_offset) {
9975    typed_array_start = backing_store;
9976  } else {
9977    HInstruction* external_pointer =
9978        AddUncasted<HAdd>(backing_store, byte_offset);
9979    // Arguments are checked prior to call to TypedArrayInitialize,
9980    // including byte_offset.
9981    external_pointer->ClearFlag(HValue::kCanOverflow);
9982    typed_array_start = external_pointer;
9983  }
9984
9985  Add<HStoreNamedField>(elements,
9986                        HObjectAccess::ForFixedTypedArrayBaseBasePointer(),
9987                        graph()->GetConstant0());
9988  Add<HStoreNamedField>(elements,
9989                        HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
9990                        typed_array_start);
9991
9992  return elements;
9993}
9994
9995
9996HValue* HOptimizedGraphBuilder::BuildAllocateFixedTypedArray(
9997    ExternalArrayType array_type, size_t element_size,
9998    ElementsKind fixed_elements_kind, HValue* byte_length, HValue* length,
9999    bool initialize) {
10000  STATIC_ASSERT(
10001      (FixedTypedArrayBase::kHeaderSize & kObjectAlignmentMask) == 0);
10002  HValue* total_size;
10003
10004  // if fixed array's elements are not aligned to object's alignment,
10005  // we need to align the whole array to object alignment.
10006  if (element_size % kObjectAlignment != 0) {
10007    total_size = BuildObjectSizeAlignment(
10008        byte_length, FixedTypedArrayBase::kHeaderSize);
10009  } else {
10010    total_size = AddUncasted<HAdd>(byte_length,
10011        Add<HConstant>(FixedTypedArrayBase::kHeaderSize));
10012    total_size->ClearFlag(HValue::kCanOverflow);
10013  }
10014
10015  // The HForceRepresentation is to prevent possible deopt on int-smi
10016  // conversion after allocation but before the new object fields are set.
10017  length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
10018  Handle<Map> fixed_typed_array_map(
10019      isolate()->heap()->MapForFixedTypedArray(array_type));
10020  HAllocate* elements = Add<HAllocate>(
10021      total_size, HType::HeapObject(), NOT_TENURED,
10022      fixed_typed_array_map->instance_type(), graph()->GetConstant0());
10023
10024#ifndef V8_HOST_ARCH_64_BIT
10025  if (array_type == kExternalFloat64Array) {
10026    elements->MakeDoubleAligned();
10027  }
10028#endif
10029
10030  AddStoreMapConstant(elements, fixed_typed_array_map);
10031
10032  Add<HStoreNamedField>(elements,
10033      HObjectAccess::ForFixedArrayLength(),
10034      length);
10035  Add<HStoreNamedField>(
10036      elements, HObjectAccess::ForFixedTypedArrayBaseBasePointer(), elements);
10037
10038  Add<HStoreNamedField>(
10039      elements, HObjectAccess::ForFixedTypedArrayBaseExternalPointer(),
10040      Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()));
10041
10042  HValue* filler = Add<HConstant>(static_cast<int32_t>(0));
10043
10044  if (initialize) {
10045    LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
10046
10047    HValue* backing_store = AddUncasted<HAdd>(
10048        Add<HConstant>(ExternalReference::fixed_typed_array_base_data_offset()),
10049        elements, AddOfExternalAndTagged);
10050
10051    HValue* key = builder.BeginBody(
10052        Add<HConstant>(static_cast<int32_t>(0)),
10053        length, Token::LT);
10054    Add<HStoreKeyed>(backing_store, key, filler, elements, fixed_elements_kind);
10055
10056    builder.EndBody();
10057  }
10058  return elements;
10059}
10060
10061
10062void HOptimizedGraphBuilder::GenerateTypedArrayInitialize(
10063    CallRuntime* expr) {
10064  ZoneList<Expression*>* arguments = expr->arguments();
10065
10066  static const int kObjectArg = 0;
10067  static const int kArrayIdArg = 1;
10068  static const int kBufferArg = 2;
10069  static const int kByteOffsetArg = 3;
10070  static const int kByteLengthArg = 4;
10071  static const int kInitializeArg = 5;
10072  static const int kArgsLength = 6;
10073  DCHECK(arguments->length() == kArgsLength);
10074
10075
10076  CHECK_ALIVE(VisitForValue(arguments->at(kObjectArg)));
10077  HValue* obj = Pop();
10078
10079  if (!arguments->at(kArrayIdArg)->IsLiteral()) {
10080    // This should never happen in real use, but can happen when fuzzing.
10081    // Just bail out.
10082    Bailout(kNeedSmiLiteral);
10083    return;
10084  }
10085  Handle<Object> value =
10086      static_cast<Literal*>(arguments->at(kArrayIdArg))->value();
10087  if (!value->IsSmi()) {
10088    // This should never happen in real use, but can happen when fuzzing.
10089    // Just bail out.
10090    Bailout(kNeedSmiLiteral);
10091    return;
10092  }
10093  int array_id = Smi::cast(*value)->value();
10094
10095  HValue* buffer;
10096  if (!arguments->at(kBufferArg)->IsNullLiteral()) {
10097    CHECK_ALIVE(VisitForValue(arguments->at(kBufferArg)));
10098    buffer = Pop();
10099  } else {
10100    buffer = NULL;
10101  }
10102
10103  HValue* byte_offset;
10104  bool is_zero_byte_offset;
10105
10106  if (arguments->at(kByteOffsetArg)->IsLiteral() &&
10107      Smi::kZero ==
10108          *static_cast<Literal*>(arguments->at(kByteOffsetArg))->value()) {
10109    byte_offset = Add<HConstant>(static_cast<int32_t>(0));
10110    is_zero_byte_offset = true;
10111  } else {
10112    CHECK_ALIVE(VisitForValue(arguments->at(kByteOffsetArg)));
10113    byte_offset = Pop();
10114    is_zero_byte_offset = false;
10115    DCHECK(buffer != NULL);
10116  }
10117
10118  CHECK_ALIVE(VisitForValue(arguments->at(kByteLengthArg)));
10119  HValue* byte_length = Pop();
10120
10121  CHECK(arguments->at(kInitializeArg)->IsLiteral());
10122  bool initialize = static_cast<Literal*>(arguments->at(kInitializeArg))
10123                        ->value()
10124                        ->BooleanValue();
10125
10126  NoObservableSideEffectsScope scope(this);
10127  IfBuilder byte_offset_smi(this);
10128
10129  if (!is_zero_byte_offset) {
10130    byte_offset_smi.If<HIsSmiAndBranch>(byte_offset);
10131    byte_offset_smi.Then();
10132  }
10133
10134  ExternalArrayType array_type =
10135      kExternalInt8Array;  // Bogus initialization.
10136  size_t element_size = 1;  // Bogus initialization.
10137  ElementsKind fixed_elements_kind =  // Bogus initialization.
10138      INT8_ELEMENTS;
10139  Runtime::ArrayIdToTypeAndSize(array_id,
10140      &array_type,
10141      &fixed_elements_kind,
10142      &element_size);
10143
10144
10145  { //  byte_offset is Smi.
10146    HValue* allocated_buffer = buffer;
10147    if (buffer == NULL) {
10148      allocated_buffer = BuildAllocateEmptyArrayBuffer(byte_length);
10149    }
10150    BuildArrayBufferViewInitialization<JSTypedArray>(obj, allocated_buffer,
10151                                                     byte_offset, byte_length);
10152
10153
10154    HInstruction* length = AddUncasted<HDiv>(byte_length,
10155        Add<HConstant>(static_cast<int32_t>(element_size)));
10156    // Callers (in typedarray.js) ensure that length <= %_MaxSmi().
10157    length = AddUncasted<HForceRepresentation>(length, Representation::Smi());
10158
10159    Add<HStoreNamedField>(obj,
10160        HObjectAccess::ForJSTypedArrayLength(),
10161        length);
10162
10163    HValue* elements;
10164    if (buffer != NULL) {
10165      elements = BuildAllocateExternalElements(
10166          array_type, is_zero_byte_offset, buffer, byte_offset, length);
10167    } else {
10168      DCHECK(is_zero_byte_offset);
10169      elements = BuildAllocateFixedTypedArray(array_type, element_size,
10170                                              fixed_elements_kind, byte_length,
10171                                              length, initialize);
10172    }
10173    Add<HStoreNamedField>(
10174        obj, HObjectAccess::ForElementsPointer(), elements);
10175  }
10176
10177  if (!is_zero_byte_offset) {
10178    byte_offset_smi.Else();
10179    { //  byte_offset is not Smi.
10180      Push(obj);
10181      CHECK_ALIVE(VisitForValue(arguments->at(kArrayIdArg)));
10182      Push(buffer);
10183      Push(byte_offset);
10184      Push(byte_length);
10185      CHECK_ALIVE(VisitForValue(arguments->at(kInitializeArg)));
10186      PushArgumentsFromEnvironment(kArgsLength);
10187      Add<HCallRuntime>(expr->function(), kArgsLength);
10188    }
10189  }
10190  byte_offset_smi.End();
10191}
10192
10193
10194void HOptimizedGraphBuilder::GenerateMaxSmi(CallRuntime* expr) {
10195  DCHECK(expr->arguments()->length() == 0);
10196  HConstant* max_smi = New<HConstant>(static_cast<int32_t>(Smi::kMaxValue));
10197  return ast_context()->ReturnInstruction(max_smi, expr->id());
10198}
10199
10200
10201void HOptimizedGraphBuilder::GenerateTypedArrayMaxSizeInHeap(
10202    CallRuntime* expr) {
10203  DCHECK(expr->arguments()->length() == 0);
10204  HConstant* result = New<HConstant>(static_cast<int32_t>(
10205        FLAG_typed_array_max_size_in_heap));
10206  return ast_context()->ReturnInstruction(result, expr->id());
10207}
10208
10209
10210void HOptimizedGraphBuilder::GenerateArrayBufferGetByteLength(
10211    CallRuntime* expr) {
10212  DCHECK(expr->arguments()->length() == 1);
10213  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10214  HValue* buffer = Pop();
10215  HInstruction* result = New<HLoadNamedField>(
10216      buffer, nullptr, HObjectAccess::ForJSArrayBufferByteLength());
10217  return ast_context()->ReturnInstruction(result, expr->id());
10218}
10219
10220
10221void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteLength(
10222    CallRuntime* expr) {
10223  NoObservableSideEffectsScope scope(this);
10224  DCHECK(expr->arguments()->length() == 1);
10225  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10226  HValue* view = Pop();
10227
10228  return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10229      view, nullptr,
10230      FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteLengthOffset)));
10231}
10232
10233
10234void HOptimizedGraphBuilder::GenerateArrayBufferViewGetByteOffset(
10235    CallRuntime* expr) {
10236  NoObservableSideEffectsScope scope(this);
10237  DCHECK(expr->arguments()->length() == 1);
10238  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10239  HValue* view = Pop();
10240
10241  return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10242      view, nullptr,
10243      FieldIndex::ForInObjectOffset(JSArrayBufferView::kByteOffsetOffset)));
10244}
10245
10246
10247void HOptimizedGraphBuilder::GenerateTypedArrayGetLength(
10248    CallRuntime* expr) {
10249  NoObservableSideEffectsScope scope(this);
10250  DCHECK(expr->arguments()->length() == 1);
10251  CHECK_ALIVE(VisitForValue(expr->arguments()->at(0)));
10252  HValue* view = Pop();
10253
10254  return ast_context()->ReturnValue(BuildArrayBufferViewFieldAccessor(
10255      view, nullptr,
10256      FieldIndex::ForInObjectOffset(JSTypedArray::kLengthOffset)));
10257}
10258
10259
10260void HOptimizedGraphBuilder::VisitCallRuntime(CallRuntime* expr) {
10261  DCHECK(!HasStackOverflow());
10262  DCHECK(current_block() != NULL);
10263  DCHECK(current_block()->HasPredecessor());
10264  if (expr->is_jsruntime()) {
10265    // Crankshaft always specializes to the native context, so we can just grab
10266    // the constant function from the current native context and embed that into
10267    // the code object.
10268    Handle<JSFunction> known_function(
10269        JSFunction::cast(
10270            current_info()->native_context()->get(expr->context_index())),
10271        isolate());
10272
10273    // The callee and the receiver both have to be pushed onto the operand stack
10274    // before arguments are being evaluated.
10275    HConstant* function = Add<HConstant>(known_function);
10276    HValue* receiver = ImplicitReceiverFor(function, known_function);
10277    Push(function);
10278    Push(receiver);
10279
10280    int argument_count = expr->arguments()->length() + 1;  // Count receiver.
10281    CHECK_ALIVE(VisitExpressions(expr->arguments()));
10282    PushArgumentsFromEnvironment(argument_count);
10283    HInstruction* call = NewCallConstantFunction(known_function, argument_count,
10284                                                 TailCallMode::kDisallow,
10285                                                 TailCallMode::kDisallow);
10286    Drop(1);  // Function
10287    return ast_context()->ReturnInstruction(call, expr->id());
10288  }
10289
10290  const Runtime::Function* function = expr->function();
10291  DCHECK(function != NULL);
10292  switch (function->function_id) {
10293#define CALL_INTRINSIC_GENERATOR(Name) \
10294  case Runtime::kInline##Name:         \
10295    return Generate##Name(expr);
10296
10297    FOR_EACH_HYDROGEN_INTRINSIC(CALL_INTRINSIC_GENERATOR)
10298#undef CALL_INTRINSIC_GENERATOR
10299    default: {
10300      int argument_count = expr->arguments()->length();
10301      CHECK_ALIVE(VisitExpressions(expr->arguments()));
10302      PushArgumentsFromEnvironment(argument_count);
10303      HCallRuntime* call = New<HCallRuntime>(function, argument_count);
10304      return ast_context()->ReturnInstruction(call, expr->id());
10305    }
10306  }
10307}
10308
10309
10310void HOptimizedGraphBuilder::VisitUnaryOperation(UnaryOperation* expr) {
10311  DCHECK(!HasStackOverflow());
10312  DCHECK(current_block() != NULL);
10313  DCHECK(current_block()->HasPredecessor());
10314  switch (expr->op()) {
10315    case Token::DELETE: return VisitDelete(expr);
10316    case Token::VOID: return VisitVoid(expr);
10317    case Token::TYPEOF: return VisitTypeof(expr);
10318    case Token::NOT: return VisitNot(expr);
10319    default: UNREACHABLE();
10320  }
10321}
10322
10323
10324void HOptimizedGraphBuilder::VisitDelete(UnaryOperation* expr) {
10325  Property* prop = expr->expression()->AsProperty();
10326  VariableProxy* proxy = expr->expression()->AsVariableProxy();
10327  if (prop != NULL) {
10328    CHECK_ALIVE(VisitForValue(prop->obj()));
10329    CHECK_ALIVE(VisitForValue(prop->key()));
10330    HValue* key = Pop();
10331    HValue* obj = Pop();
10332    Add<HPushArguments>(obj, key);
10333    HInstruction* instr = New<HCallRuntime>(
10334        Runtime::FunctionForId(is_strict(function_language_mode())
10335                                   ? Runtime::kDeleteProperty_Strict
10336                                   : Runtime::kDeleteProperty_Sloppy),
10337        2);
10338    return ast_context()->ReturnInstruction(instr, expr->id());
10339  } else if (proxy != NULL) {
10340    Variable* var = proxy->var();
10341    if (var->IsUnallocated()) {
10342      Bailout(kDeleteWithGlobalVariable);
10343    } else if (var->IsStackAllocated() || var->IsContextSlot()) {
10344      // Result of deleting non-global variables is false.  'this' is not really
10345      // a variable, though we implement it as one.  The subexpression does not
10346      // have side effects.
10347      HValue* value = var->is_this() ? graph()->GetConstantTrue()
10348                                     : graph()->GetConstantFalse();
10349      return ast_context()->ReturnValue(value);
10350    } else {
10351      Bailout(kDeleteWithNonGlobalVariable);
10352    }
10353  } else {
10354    // Result of deleting non-property, non-variable reference is true.
10355    // Evaluate the subexpression for side effects.
10356    CHECK_ALIVE(VisitForEffect(expr->expression()));
10357    return ast_context()->ReturnValue(graph()->GetConstantTrue());
10358  }
10359}
10360
10361
10362void HOptimizedGraphBuilder::VisitVoid(UnaryOperation* expr) {
10363  CHECK_ALIVE(VisitForEffect(expr->expression()));
10364  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
10365}
10366
10367
10368void HOptimizedGraphBuilder::VisitTypeof(UnaryOperation* expr) {
10369  CHECK_ALIVE(VisitForTypeOf(expr->expression()));
10370  HValue* value = Pop();
10371  HInstruction* instr = New<HTypeof>(value);
10372  return ast_context()->ReturnInstruction(instr, expr->id());
10373}
10374
10375
10376void HOptimizedGraphBuilder::VisitNot(UnaryOperation* expr) {
10377  if (ast_context()->IsTest()) {
10378    TestContext* context = TestContext::cast(ast_context());
10379    VisitForControl(expr->expression(),
10380                    context->if_false(),
10381                    context->if_true());
10382    return;
10383  }
10384
10385  if (ast_context()->IsEffect()) {
10386    VisitForEffect(expr->expression());
10387    return;
10388  }
10389
10390  DCHECK(ast_context()->IsValue());
10391  HBasicBlock* materialize_false = graph()->CreateBasicBlock();
10392  HBasicBlock* materialize_true = graph()->CreateBasicBlock();
10393  CHECK_BAILOUT(VisitForControl(expr->expression(),
10394                                materialize_false,
10395                                materialize_true));
10396
10397  if (materialize_false->HasPredecessor()) {
10398    materialize_false->SetJoinId(expr->MaterializeFalseId());
10399    set_current_block(materialize_false);
10400    Push(graph()->GetConstantFalse());
10401  } else {
10402    materialize_false = NULL;
10403  }
10404
10405  if (materialize_true->HasPredecessor()) {
10406    materialize_true->SetJoinId(expr->MaterializeTrueId());
10407    set_current_block(materialize_true);
10408    Push(graph()->GetConstantTrue());
10409  } else {
10410    materialize_true = NULL;
10411  }
10412
10413  HBasicBlock* join =
10414    CreateJoin(materialize_false, materialize_true, expr->id());
10415  set_current_block(join);
10416  if (join != NULL) return ast_context()->ReturnValue(Pop());
10417}
10418
10419static Representation RepresentationFor(AstType* type) {
10420  DisallowHeapAllocation no_allocation;
10421  if (type->Is(AstType::None())) return Representation::None();
10422  if (type->Is(AstType::SignedSmall())) return Representation::Smi();
10423  if (type->Is(AstType::Signed32())) return Representation::Integer32();
10424  if (type->Is(AstType::Number())) return Representation::Double();
10425  return Representation::Tagged();
10426}
10427
10428HInstruction* HOptimizedGraphBuilder::BuildIncrement(CountOperation* expr) {
10429  // The input to the count operation is on top of the expression stack.
10430  Representation rep = RepresentationFor(expr->type());
10431  if (rep.IsNone() || rep.IsTagged()) {
10432    rep = Representation::Smi();
10433  }
10434
10435  // We need an explicit HValue representing ToNumber(input).  The
10436  // actual HChange instruction we need is (sometimes) added in a later
10437  // phase, so it is not available now to be used as an input to HAdd and
10438  // as the return value.
10439  HInstruction* number_input = AddUncasted<HForceRepresentation>(Pop(), rep);
10440  if (!rep.IsDouble()) {
10441    number_input->SetFlag(HInstruction::kFlexibleRepresentation);
10442    number_input->SetFlag(HInstruction::kCannotBeTagged);
10443  }
10444  Push(number_input);
10445
10446  // The addition has no side effects, so we do not need
10447  // to simulate the expression stack after this instruction.
10448  // Any later failures deopt to the load of the input or earlier.
10449  HConstant* delta = (expr->op() == Token::INC)
10450      ? graph()->GetConstant1()
10451      : graph()->GetConstantMinus1();
10452  HInstruction* instr = AddUncasted<HAdd>(Top(), delta);
10453  if (instr->IsAdd()) {
10454    HAdd* add = HAdd::cast(instr);
10455    add->set_observed_input_representation(1, rep);
10456    add->set_observed_input_representation(2, Representation::Smi());
10457  }
10458  instr->ClearAllSideEffects();
10459  instr->SetFlag(HInstruction::kCannotBeTagged);
10460  return instr;
10461}
10462
10463void HOptimizedGraphBuilder::BuildStoreForEffect(
10464    Expression* expr, Property* prop, FeedbackSlot slot, BailoutId ast_id,
10465    BailoutId return_id, HValue* object, HValue* key, HValue* value) {
10466  EffectContext for_effect(this);
10467  Push(object);
10468  if (key != NULL) Push(key);
10469  Push(value);
10470  BuildStore(expr, prop, slot, ast_id, return_id);
10471}
10472
10473
10474void HOptimizedGraphBuilder::VisitCountOperation(CountOperation* expr) {
10475  DCHECK(!HasStackOverflow());
10476  DCHECK(current_block() != NULL);
10477  DCHECK(current_block()->HasPredecessor());
10478  if (!is_tracking_positions()) SetSourcePosition(expr->position());
10479  Expression* target = expr->expression();
10480  VariableProxy* proxy = target->AsVariableProxy();
10481  Property* prop = target->AsProperty();
10482  if (proxy == NULL && prop == NULL) {
10483    return Bailout(kInvalidLhsInCountOperation);
10484  }
10485
10486  // Match the full code generator stack by simulating an extra stack
10487  // element for postfix operations in a non-effect context.  The return
10488  // value is ToNumber(input).
10489  bool returns_original_input =
10490      expr->is_postfix() && !ast_context()->IsEffect();
10491  HValue* input = NULL;  // ToNumber(original_input).
10492  HValue* after = NULL;  // The result after incrementing or decrementing.
10493
10494  if (proxy != NULL) {
10495    Variable* var = proxy->var();
10496    if (var->mode() == CONST) {
10497      return Bailout(kNonInitializerAssignmentToConst);
10498    }
10499    // Argument of the count operation is a variable, not a property.
10500    DCHECK(prop == NULL);
10501    CHECK_ALIVE(VisitForValue(target));
10502
10503    after = BuildIncrement(expr);
10504    input = returns_original_input ? Top() : Pop();
10505    Push(after);
10506
10507    switch (var->location()) {
10508      case VariableLocation::UNALLOCATED:
10509        HandleGlobalVariableAssignment(var, after, expr->CountSlot(),
10510                                       expr->AssignmentId());
10511        break;
10512
10513      case VariableLocation::PARAMETER:
10514      case VariableLocation::LOCAL:
10515        BindIfLive(var, after);
10516        break;
10517
10518      case VariableLocation::CONTEXT: {
10519        HValue* context = BuildContextChainWalk(var);
10520        HStoreContextSlot::Mode mode = IsLexicalVariableMode(var->mode())
10521            ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck;
10522        HStoreContextSlot* instr = Add<HStoreContextSlot>(context, var->index(),
10523                                                          mode, after);
10524        if (instr->HasObservableSideEffects()) {
10525          Add<HSimulate>(expr->AssignmentId(), REMOVABLE_SIMULATE);
10526        }
10527        break;
10528      }
10529
10530      case VariableLocation::LOOKUP:
10531        return Bailout(kLookupVariableInCountOperation);
10532
10533      case VariableLocation::MODULE:
10534        UNREACHABLE();
10535    }
10536
10537    Drop(returns_original_input ? 2 : 1);
10538    return ast_context()->ReturnValue(expr->is_postfix() ? input : after);
10539  }
10540
10541  // Argument of the count operation is a property.
10542  DCHECK(prop != NULL);
10543  if (returns_original_input) Push(graph()->GetConstantUndefined());
10544
10545  CHECK_ALIVE(VisitForValue(prop->obj()));
10546  HValue* object = Top();
10547
10548  HValue* key = NULL;
10549  if (!prop->key()->IsPropertyName() || prop->IsStringAccess()) {
10550    CHECK_ALIVE(VisitForValue(prop->key()));
10551    key = Top();
10552  }
10553
10554  CHECK_ALIVE(PushLoad(prop, object, key));
10555
10556  after = BuildIncrement(expr);
10557
10558  if (returns_original_input) {
10559    input = Pop();
10560    // Drop object and key to push it again in the effect context below.
10561    Drop(key == NULL ? 1 : 2);
10562    environment()->SetExpressionStackAt(0, input);
10563    CHECK_ALIVE(BuildStoreForEffect(expr, prop, expr->CountSlot(), expr->id(),
10564                                    expr->AssignmentId(), object, key, after));
10565    return ast_context()->ReturnValue(Pop());
10566  }
10567
10568  environment()->SetExpressionStackAt(0, after);
10569  return BuildStore(expr, prop, expr->CountSlot(), expr->id(),
10570                    expr->AssignmentId());
10571}
10572
10573
10574HInstruction* HOptimizedGraphBuilder::BuildStringCharCodeAt(
10575    HValue* string,
10576    HValue* index) {
10577  if (string->IsConstant() && index->IsConstant()) {
10578    HConstant* c_string = HConstant::cast(string);
10579    HConstant* c_index = HConstant::cast(index);
10580    if (c_string->HasStringValue() && c_index->HasNumberValue()) {
10581      int32_t i = c_index->NumberValueAsInteger32();
10582      Handle<String> s = c_string->StringValue();
10583      if (i < 0 || i >= s->length()) {
10584        return New<HConstant>(std::numeric_limits<double>::quiet_NaN());
10585      }
10586      return New<HConstant>(s->Get(i));
10587    }
10588  }
10589  string = BuildCheckString(string);
10590  index = Add<HBoundsCheck>(index, AddLoadStringLength(string));
10591  return New<HStringCharCodeAt>(string, index);
10592}
10593
10594
10595// Checks if the given shift amounts have following forms:
10596// (N1) and (N2) with N1 + N2 = 32; (sa) and (32 - sa).
10597static bool ShiftAmountsAllowReplaceByRotate(HValue* sa,
10598                                             HValue* const32_minus_sa) {
10599  if (sa->IsConstant() && const32_minus_sa->IsConstant()) {
10600    const HConstant* c1 = HConstant::cast(sa);
10601    const HConstant* c2 = HConstant::cast(const32_minus_sa);
10602    return c1->HasInteger32Value() && c2->HasInteger32Value() &&
10603        (c1->Integer32Value() + c2->Integer32Value() == 32);
10604  }
10605  if (!const32_minus_sa->IsSub()) return false;
10606  HSub* sub = HSub::cast(const32_minus_sa);
10607  return sub->left()->EqualsInteger32Constant(32) && sub->right() == sa;
10608}
10609
10610
10611// Checks if the left and the right are shift instructions with the oposite
10612// directions that can be replaced by one rotate right instruction or not.
10613// Returns the operand and the shift amount for the rotate instruction in the
10614// former case.
10615bool HGraphBuilder::MatchRotateRight(HValue* left,
10616                                     HValue* right,
10617                                     HValue** operand,
10618                                     HValue** shift_amount) {
10619  HShl* shl;
10620  HShr* shr;
10621  if (left->IsShl() && right->IsShr()) {
10622    shl = HShl::cast(left);
10623    shr = HShr::cast(right);
10624  } else if (left->IsShr() && right->IsShl()) {
10625    shl = HShl::cast(right);
10626    shr = HShr::cast(left);
10627  } else {
10628    return false;
10629  }
10630  if (shl->left() != shr->left()) return false;
10631
10632  if (!ShiftAmountsAllowReplaceByRotate(shl->right(), shr->right()) &&
10633      !ShiftAmountsAllowReplaceByRotate(shr->right(), shl->right())) {
10634    return false;
10635  }
10636  *operand = shr->left();
10637  *shift_amount = shr->right();
10638  return true;
10639}
10640
10641
10642bool CanBeZero(HValue* right) {
10643  if (right->IsConstant()) {
10644    HConstant* right_const = HConstant::cast(right);
10645    if (right_const->HasInteger32Value() &&
10646       (right_const->Integer32Value() & 0x1f) != 0) {
10647      return false;
10648    }
10649  }
10650  return true;
10651}
10652
10653HValue* HGraphBuilder::EnforceNumberType(HValue* number, AstType* expected) {
10654  if (expected->Is(AstType::SignedSmall())) {
10655    return AddUncasted<HForceRepresentation>(number, Representation::Smi());
10656  }
10657  if (expected->Is(AstType::Signed32())) {
10658    return AddUncasted<HForceRepresentation>(number,
10659                                             Representation::Integer32());
10660  }
10661  return number;
10662}
10663
10664HValue* HGraphBuilder::TruncateToNumber(HValue* value, AstType** expected) {
10665  if (value->IsConstant()) {
10666    HConstant* constant = HConstant::cast(value);
10667    Maybe<HConstant*> number =
10668        constant->CopyToTruncatedNumber(isolate(), zone());
10669    if (number.IsJust()) {
10670      *expected = AstType::Number();
10671      return AddInstruction(number.FromJust());
10672    }
10673  }
10674
10675  // We put temporary values on the stack, which don't correspond to anything
10676  // in baseline code. Since nothing is observable we avoid recording those
10677  // pushes with a NoObservableSideEffectsScope.
10678  NoObservableSideEffectsScope no_effects(this);
10679
10680  AstType* expected_type = *expected;
10681
10682  // Separate the number type from the rest.
10683  AstType* expected_obj =
10684      AstType::Intersect(expected_type, AstType::NonNumber(), zone());
10685  AstType* expected_number =
10686      AstType::Intersect(expected_type, AstType::Number(), zone());
10687
10688  // We expect to get a number.
10689  // (We need to check first, since AstType::None->Is(AstType::Any()) == true.
10690  if (expected_obj->Is(AstType::None())) {
10691    DCHECK(!expected_number->Is(AstType::None()));
10692    return value;
10693  }
10694
10695  if (expected_obj->Is(AstType::Undefined())) {
10696    // This is already done by HChange.
10697    *expected = AstType::Union(expected_number, AstType::Number(), zone());
10698    return value;
10699  }
10700
10701  return value;
10702}
10703
10704
10705HValue* HOptimizedGraphBuilder::BuildBinaryOperation(
10706    BinaryOperation* expr,
10707    HValue* left,
10708    HValue* right,
10709    PushBeforeSimulateBehavior push_sim_result) {
10710  AstType* left_type = bounds_.get(expr->left()).lower;
10711  AstType* right_type = bounds_.get(expr->right()).lower;
10712  AstType* result_type = bounds_.get(expr).lower;
10713  Maybe<int> fixed_right_arg = expr->fixed_right_arg();
10714  Handle<AllocationSite> allocation_site = expr->allocation_site();
10715
10716  HAllocationMode allocation_mode;
10717  if (FLAG_allocation_site_pretenuring && !allocation_site.is_null()) {
10718    allocation_mode = HAllocationMode(allocation_site);
10719  }
10720  HValue* result = HGraphBuilder::BuildBinaryOperation(
10721      expr->op(), left, right, left_type, right_type, result_type,
10722      fixed_right_arg, allocation_mode, expr->id());
10723  // Add a simulate after instructions with observable side effects, and
10724  // after phis, which are the result of BuildBinaryOperation when we
10725  // inlined some complex subgraph.
10726  if (result->HasObservableSideEffects() || result->IsPhi()) {
10727    if (push_sim_result == PUSH_BEFORE_SIMULATE) {
10728      Push(result);
10729      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10730      Drop(1);
10731    } else {
10732      Add<HSimulate>(expr->id(), REMOVABLE_SIMULATE);
10733    }
10734  }
10735  return result;
10736}
10737
10738HValue* HGraphBuilder::BuildBinaryOperation(
10739    Token::Value op, HValue* left, HValue* right, AstType* left_type,
10740    AstType* right_type, AstType* result_type, Maybe<int> fixed_right_arg,
10741    HAllocationMode allocation_mode, BailoutId opt_id) {
10742  bool maybe_string_add = false;
10743  if (op == Token::ADD) {
10744    // If we are adding constant string with something for which we don't have
10745    // a feedback yet, assume that it's also going to be a string and don't
10746    // generate deopt instructions.
10747    if (!left_type->IsInhabited() && right->IsConstant() &&
10748        HConstant::cast(right)->HasStringValue()) {
10749      left_type = AstType::String();
10750    }
10751
10752    if (!right_type->IsInhabited() && left->IsConstant() &&
10753        HConstant::cast(left)->HasStringValue()) {
10754      right_type = AstType::String();
10755    }
10756
10757    maybe_string_add = (left_type->Maybe(AstType::String()) ||
10758                        left_type->Maybe(AstType::Receiver()) ||
10759                        right_type->Maybe(AstType::String()) ||
10760                        right_type->Maybe(AstType::Receiver()));
10761  }
10762
10763  Representation left_rep = RepresentationFor(left_type);
10764  Representation right_rep = RepresentationFor(right_type);
10765
10766  if (!left_type->IsInhabited()) {
10767    Add<HDeoptimize>(
10768        DeoptimizeReason::kInsufficientTypeFeedbackForLHSOfBinaryOperation,
10769        Deoptimizer::SOFT);
10770    left_type = AstType::Any();
10771    left_rep = RepresentationFor(left_type);
10772    maybe_string_add = op == Token::ADD;
10773  }
10774
10775  if (!right_type->IsInhabited()) {
10776    Add<HDeoptimize>(
10777        DeoptimizeReason::kInsufficientTypeFeedbackForRHSOfBinaryOperation,
10778        Deoptimizer::SOFT);
10779    right_type = AstType::Any();
10780    right_rep = RepresentationFor(right_type);
10781    maybe_string_add = op == Token::ADD;
10782  }
10783
10784  if (!maybe_string_add) {
10785    left = TruncateToNumber(left, &left_type);
10786    right = TruncateToNumber(right, &right_type);
10787  }
10788
10789  // Special case for string addition here.
10790  if (op == Token::ADD &&
10791      (left_type->Is(AstType::String()) || right_type->Is(AstType::String()))) {
10792    // Validate type feedback for left argument.
10793    if (left_type->Is(AstType::String())) {
10794      left = BuildCheckString(left);
10795    }
10796
10797    // Validate type feedback for right argument.
10798    if (right_type->Is(AstType::String())) {
10799      right = BuildCheckString(right);
10800    }
10801
10802    // Convert left argument as necessary.
10803    if (left_type->Is(AstType::Number())) {
10804      DCHECK(right_type->Is(AstType::String()));
10805      left = BuildNumberToString(left, left_type);
10806    } else if (!left_type->Is(AstType::String())) {
10807      DCHECK(right_type->Is(AstType::String()));
10808      return AddUncasted<HStringAdd>(
10809          left, right, allocation_mode.GetPretenureMode(),
10810          STRING_ADD_CONVERT_LEFT, allocation_mode.feedback_site());
10811    }
10812
10813    // Convert right argument as necessary.
10814    if (right_type->Is(AstType::Number())) {
10815      DCHECK(left_type->Is(AstType::String()));
10816      right = BuildNumberToString(right, right_type);
10817    } else if (!right_type->Is(AstType::String())) {
10818      DCHECK(left_type->Is(AstType::String()));
10819      return AddUncasted<HStringAdd>(
10820          left, right, allocation_mode.GetPretenureMode(),
10821          STRING_ADD_CONVERT_RIGHT, allocation_mode.feedback_site());
10822    }
10823
10824    // Fast paths for empty constant strings.
10825    Handle<String> left_string =
10826        left->IsConstant() && HConstant::cast(left)->HasStringValue()
10827            ? HConstant::cast(left)->StringValue()
10828            : Handle<String>();
10829    Handle<String> right_string =
10830        right->IsConstant() && HConstant::cast(right)->HasStringValue()
10831            ? HConstant::cast(right)->StringValue()
10832            : Handle<String>();
10833    if (!left_string.is_null() && left_string->length() == 0) return right;
10834    if (!right_string.is_null() && right_string->length() == 0) return left;
10835    if (!left_string.is_null() && !right_string.is_null()) {
10836      return AddUncasted<HStringAdd>(
10837          left, right, allocation_mode.GetPretenureMode(),
10838          STRING_ADD_CHECK_NONE, allocation_mode.feedback_site());
10839    }
10840
10841    // Register the dependent code with the allocation site.
10842    if (!allocation_mode.feedback_site().is_null()) {
10843      DCHECK(!graph()->info()->IsStub());
10844      Handle<AllocationSite> site(allocation_mode.feedback_site());
10845      top_info()->dependencies()->AssumeTenuringDecision(site);
10846    }
10847
10848    // Inline the string addition into the stub when creating allocation
10849    // mementos to gather allocation site feedback, or if we can statically
10850    // infer that we're going to create a cons string.
10851    if ((graph()->info()->IsStub() &&
10852         allocation_mode.CreateAllocationMementos()) ||
10853        (left->IsConstant() &&
10854         HConstant::cast(left)->HasStringValue() &&
10855         HConstant::cast(left)->StringValue()->length() + 1 >=
10856           ConsString::kMinLength) ||
10857        (right->IsConstant() &&
10858         HConstant::cast(right)->HasStringValue() &&
10859         HConstant::cast(right)->StringValue()->length() + 1 >=
10860           ConsString::kMinLength)) {
10861      return BuildStringAdd(left, right, allocation_mode);
10862    }
10863
10864    // Fallback to using the string add stub.
10865    return AddUncasted<HStringAdd>(
10866        left, right, allocation_mode.GetPretenureMode(), STRING_ADD_CHECK_NONE,
10867        allocation_mode.feedback_site());
10868  }
10869
10870  // Special case for +x here.
10871  if (op == Token::MUL) {
10872    if (left->EqualsInteger32Constant(1)) {
10873      return BuildToNumber(right);
10874    }
10875    if (right->EqualsInteger32Constant(1)) {
10876      return BuildToNumber(left);
10877    }
10878  }
10879
10880  if (graph()->info()->IsStub()) {
10881    left = EnforceNumberType(left, left_type);
10882    right = EnforceNumberType(right, right_type);
10883  }
10884
10885  Representation result_rep = RepresentationFor(result_type);
10886
10887  bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
10888                          (right_rep.IsTagged() && !right_rep.IsSmi());
10889
10890  HInstruction* instr = NULL;
10891  // Only the stub is allowed to call into the runtime, since otherwise we would
10892  // inline several instructions (including the two pushes) for every tagged
10893  // operation in optimized code, which is more expensive, than a stub call.
10894  if (graph()->info()->IsStub() && is_non_primitive) {
10895    HValue* values[] = {left, right};
10896#define GET_STUB(Name)                                                       \
10897  do {                                                                       \
10898    Callable callable = CodeFactory::Name(isolate());                        \
10899    HValue* stub = Add<HConstant>(callable.code());                          \
10900    instr = AddUncasted<HCallWithDescriptor>(stub, 0, callable.descriptor(), \
10901                                             ArrayVector(values));           \
10902  } while (false)
10903
10904    switch (op) {
10905      default:
10906        UNREACHABLE();
10907      case Token::ADD:
10908        GET_STUB(Add);
10909        break;
10910      case Token::SUB:
10911        GET_STUB(Subtract);
10912        break;
10913      case Token::MUL:
10914        GET_STUB(Multiply);
10915        break;
10916      case Token::DIV:
10917        GET_STUB(Divide);
10918        break;
10919      case Token::MOD:
10920        GET_STUB(Modulus);
10921        break;
10922      case Token::BIT_OR:
10923        GET_STUB(BitwiseOr);
10924        break;
10925      case Token::BIT_AND:
10926        GET_STUB(BitwiseAnd);
10927        break;
10928      case Token::BIT_XOR:
10929        GET_STUB(BitwiseXor);
10930        break;
10931      case Token::SAR:
10932        GET_STUB(ShiftRight);
10933        break;
10934      case Token::SHR:
10935        GET_STUB(ShiftRightLogical);
10936        break;
10937      case Token::SHL:
10938        GET_STUB(ShiftLeft);
10939        break;
10940    }
10941#undef GET_STUB
10942  } else {
10943    switch (op) {
10944      case Token::ADD:
10945        instr = AddUncasted<HAdd>(left, right);
10946        break;
10947      case Token::SUB:
10948        instr = AddUncasted<HSub>(left, right);
10949        break;
10950      case Token::MUL:
10951        instr = AddUncasted<HMul>(left, right);
10952        break;
10953      case Token::MOD: {
10954        if (fixed_right_arg.IsJust() &&
10955            !right->EqualsInteger32Constant(fixed_right_arg.FromJust())) {
10956          HConstant* fixed_right =
10957              Add<HConstant>(static_cast<int>(fixed_right_arg.FromJust()));
10958          IfBuilder if_same(this);
10959          if_same.If<HCompareNumericAndBranch>(right, fixed_right, Token::EQ);
10960          if_same.Then();
10961          if_same.ElseDeopt(DeoptimizeReason::kUnexpectedRHSOfBinaryOperation);
10962          right = fixed_right;
10963        }
10964        instr = AddUncasted<HMod>(left, right);
10965        break;
10966      }
10967      case Token::DIV:
10968        instr = AddUncasted<HDiv>(left, right);
10969        break;
10970      case Token::BIT_XOR:
10971      case Token::BIT_AND:
10972        instr = AddUncasted<HBitwise>(op, left, right);
10973        break;
10974      case Token::BIT_OR: {
10975        HValue *operand, *shift_amount;
10976        if (left_type->Is(AstType::Signed32()) &&
10977            right_type->Is(AstType::Signed32()) &&
10978            MatchRotateRight(left, right, &operand, &shift_amount)) {
10979          instr = AddUncasted<HRor>(operand, shift_amount);
10980        } else {
10981          instr = AddUncasted<HBitwise>(op, left, right);
10982        }
10983        break;
10984      }
10985      case Token::SAR:
10986        instr = AddUncasted<HSar>(left, right);
10987        break;
10988      case Token::SHR:
10989        instr = AddUncasted<HShr>(left, right);
10990        if (instr->IsShr() && CanBeZero(right)) {
10991          graph()->RecordUint32Instruction(instr);
10992        }
10993        break;
10994      case Token::SHL:
10995        instr = AddUncasted<HShl>(left, right);
10996        break;
10997      default:
10998        UNREACHABLE();
10999    }
11000  }
11001
11002  if (instr->IsBinaryOperation()) {
11003    HBinaryOperation* binop = HBinaryOperation::cast(instr);
11004    binop->set_observed_input_representation(1, left_rep);
11005    binop->set_observed_input_representation(2, right_rep);
11006    binop->initialize_output_representation(result_rep);
11007    if (graph()->info()->IsStub()) {
11008      // Stub should not call into stub.
11009      instr->SetFlag(HValue::kCannotBeTagged);
11010      // And should truncate on HForceRepresentation already.
11011      if (left->IsForceRepresentation()) {
11012        left->CopyFlag(HValue::kTruncatingToSmi, instr);
11013        left->CopyFlag(HValue::kTruncatingToInt32, instr);
11014      }
11015      if (right->IsForceRepresentation()) {
11016        right->CopyFlag(HValue::kTruncatingToSmi, instr);
11017        right->CopyFlag(HValue::kTruncatingToInt32, instr);
11018      }
11019    }
11020  }
11021  return instr;
11022}
11023
11024
11025// Check for the form (%_ClassOf(foo) === 'BarClass').
11026static bool IsClassOfTest(CompareOperation* expr) {
11027  if (expr->op() != Token::EQ_STRICT) return false;
11028  CallRuntime* call = expr->left()->AsCallRuntime();
11029  if (call == NULL) return false;
11030  Literal* literal = expr->right()->AsLiteral();
11031  if (literal == NULL) return false;
11032  if (!literal->value()->IsString()) return false;
11033  if (call->is_jsruntime()) return false;
11034  if (call->function()->function_id != Runtime::kInlineClassOf) return false;
11035  DCHECK_EQ(call->arguments()->length(), 1);
11036  return true;
11037}
11038
11039void HOptimizedGraphBuilder::VisitBinaryOperation(BinaryOperation* expr) {
11040  DCHECK(!HasStackOverflow());
11041  DCHECK(current_block() != NULL);
11042  DCHECK(current_block()->HasPredecessor());
11043  switch (expr->op()) {
11044    case Token::COMMA:
11045      return VisitComma(expr);
11046    case Token::OR:
11047    case Token::AND:
11048      return VisitLogicalExpression(expr);
11049    default:
11050      return VisitArithmeticExpression(expr);
11051  }
11052}
11053
11054
11055void HOptimizedGraphBuilder::VisitComma(BinaryOperation* expr) {
11056  CHECK_ALIVE(VisitForEffect(expr->left()));
11057  // Visit the right subexpression in the same AST context as the entire
11058  // expression.
11059  Visit(expr->right());
11060}
11061
11062
11063void HOptimizedGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
11064  bool is_logical_and = expr->op() == Token::AND;
11065  if (ast_context()->IsTest()) {
11066    TestContext* context = TestContext::cast(ast_context());
11067    // Translate left subexpression.
11068    HBasicBlock* eval_right = graph()->CreateBasicBlock();
11069    if (is_logical_and) {
11070      CHECK_BAILOUT(VisitForControl(expr->left(),
11071                                    eval_right,
11072                                    context->if_false()));
11073    } else {
11074      CHECK_BAILOUT(VisitForControl(expr->left(),
11075                                    context->if_true(),
11076                                    eval_right));
11077    }
11078
11079    // Translate right subexpression by visiting it in the same AST
11080    // context as the entire expression.
11081    CHECK(eval_right->HasPredecessor());
11082    eval_right->SetJoinId(expr->RightId());
11083    set_current_block(eval_right);
11084    Visit(expr->right());
11085  } else if (ast_context()->IsValue()) {
11086    CHECK_ALIVE(VisitForValue(expr->left()));
11087    DCHECK(current_block() != NULL);
11088    HValue* left_value = Top();
11089
11090    // Short-circuit left values that always evaluate to the same boolean value.
11091    if (expr->left()->ToBooleanIsTrue() || expr->left()->ToBooleanIsFalse()) {
11092      // l (evals true)  && r -> r
11093      // l (evals true)  || r -> l
11094      // l (evals false) && r -> l
11095      // l (evals false) || r -> r
11096      if (is_logical_and == expr->left()->ToBooleanIsTrue()) {
11097        Drop(1);
11098        CHECK_ALIVE(VisitForValue(expr->right()));
11099      }
11100      return ast_context()->ReturnValue(Pop());
11101    }
11102
11103    // We need an extra block to maintain edge-split form.
11104    HBasicBlock* empty_block = graph()->CreateBasicBlock();
11105    HBasicBlock* eval_right = graph()->CreateBasicBlock();
11106    ToBooleanHints expected(expr->left()->to_boolean_types());
11107    HBranch* test = is_logical_and
11108        ? New<HBranch>(left_value, expected, eval_right, empty_block)
11109        : New<HBranch>(left_value, expected, empty_block, eval_right);
11110    FinishCurrentBlock(test);
11111
11112    set_current_block(eval_right);
11113    Drop(1);  // Value of the left subexpression.
11114    CHECK_BAILOUT(VisitForValue(expr->right()));
11115
11116    HBasicBlock* join_block =
11117      CreateJoin(empty_block, current_block(), expr->id());
11118    set_current_block(join_block);
11119    return ast_context()->ReturnValue(Pop());
11120
11121  } else {
11122    DCHECK(ast_context()->IsEffect());
11123    // In an effect context, we don't need the value of the left subexpression,
11124    // only its control flow and side effects.  We need an extra block to
11125    // maintain edge-split form.
11126    HBasicBlock* empty_block = graph()->CreateBasicBlock();
11127    HBasicBlock* right_block = graph()->CreateBasicBlock();
11128    if (is_logical_and) {
11129      CHECK_BAILOUT(VisitForControl(expr->left(), right_block, empty_block));
11130    } else {
11131      CHECK_BAILOUT(VisitForControl(expr->left(), empty_block, right_block));
11132    }
11133
11134    // TODO(kmillikin): Find a way to fix this.  It's ugly that there are
11135    // actually two empty blocks (one here and one inserted by
11136    // TestContext::BuildBranch, and that they both have an HSimulate though the
11137    // second one is not a merge node, and that we really have no good AST ID to
11138    // put on that first HSimulate.
11139
11140    // Technically, we should be able to handle the case when one side of
11141    // the test is not connected, but this can trip up liveness analysis
11142    // if we did not fully connect the test context based on some optimistic
11143    // assumption. If such an assumption was violated, we would end up with
11144    // an environment with optimized-out values. So we should always
11145    // conservatively connect the test context.
11146
11147    CHECK(right_block->HasPredecessor());
11148    CHECK(empty_block->HasPredecessor());
11149
11150    empty_block->SetJoinId(expr->id());
11151
11152    right_block->SetJoinId(expr->RightId());
11153    set_current_block(right_block);
11154    CHECK_BAILOUT(VisitForEffect(expr->right()));
11155    right_block = current_block();
11156
11157    HBasicBlock* join_block =
11158      CreateJoin(empty_block, right_block, expr->id());
11159    set_current_block(join_block);
11160    // We did not materialize any value in the predecessor environments,
11161    // so there is no need to handle it here.
11162  }
11163}
11164
11165
11166void HOptimizedGraphBuilder::VisitArithmeticExpression(BinaryOperation* expr) {
11167  CHECK_ALIVE(VisitForValue(expr->left()));
11168  CHECK_ALIVE(VisitForValue(expr->right()));
11169  SetSourcePosition(expr->position());
11170  HValue* right = Pop();
11171  HValue* left = Pop();
11172  HValue* result =
11173      BuildBinaryOperation(expr, left, right,
11174          ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
11175                                    : PUSH_BEFORE_SIMULATE);
11176  return ast_context()->ReturnValue(result);
11177}
11178
11179
11180void HOptimizedGraphBuilder::HandleLiteralCompareTypeof(CompareOperation* expr,
11181                                                        Expression* sub_expr,
11182                                                        Handle<String> check) {
11183  CHECK_ALIVE(VisitForTypeOf(sub_expr));
11184  SetSourcePosition(expr->position());
11185  HValue* value = Pop();
11186  HTypeofIsAndBranch* instr = New<HTypeofIsAndBranch>(value, check);
11187  return ast_context()->ReturnControl(instr, expr->id());
11188}
11189
11190namespace {
11191
11192bool IsLiteralCompareStrict(Isolate* isolate, HValue* left, Token::Value op,
11193                            HValue* right) {
11194  return op == Token::EQ_STRICT &&
11195         ((left->IsConstant() &&
11196           !HConstant::cast(left)->handle(isolate)->IsNumber() &&
11197           !HConstant::cast(left)->handle(isolate)->IsString()) ||
11198          (right->IsConstant() &&
11199           !HConstant::cast(right)->handle(isolate)->IsNumber() &&
11200           !HConstant::cast(right)->handle(isolate)->IsString()));
11201}
11202
11203}  // namespace
11204
11205void HOptimizedGraphBuilder::VisitCompareOperation(CompareOperation* expr) {
11206  DCHECK(!HasStackOverflow());
11207  DCHECK(current_block() != NULL);
11208  DCHECK(current_block()->HasPredecessor());
11209
11210  if (!is_tracking_positions()) SetSourcePosition(expr->position());
11211
11212  // Check for a few fast cases. The AST visiting behavior must be in sync
11213  // with the full codegen: We don't push both left and right values onto
11214  // the expression stack when one side is a special-case literal.
11215  Expression* sub_expr = NULL;
11216  Handle<String> check;
11217  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
11218    return HandleLiteralCompareTypeof(expr, sub_expr, check);
11219  }
11220  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
11221    return HandleLiteralCompareNil(expr, sub_expr, kUndefinedValue);
11222  }
11223  if (expr->IsLiteralCompareNull(&sub_expr)) {
11224    return HandleLiteralCompareNil(expr, sub_expr, kNullValue);
11225  }
11226
11227  if (IsClassOfTest(expr)) {
11228    CallRuntime* call = expr->left()->AsCallRuntime();
11229    DCHECK(call->arguments()->length() == 1);
11230    CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
11231    HValue* value = Pop();
11232    Literal* literal = expr->right()->AsLiteral();
11233    Handle<String> rhs = Handle<String>::cast(literal->value());
11234    HClassOfTestAndBranch* instr = New<HClassOfTestAndBranch>(value, rhs);
11235    return ast_context()->ReturnControl(instr, expr->id());
11236  }
11237
11238  AstType* left_type = bounds_.get(expr->left()).lower;
11239  AstType* right_type = bounds_.get(expr->right()).lower;
11240  AstType* combined_type = expr->combined_type();
11241
11242  CHECK_ALIVE(VisitForValue(expr->left()));
11243  CHECK_ALIVE(VisitForValue(expr->right()));
11244
11245  HValue* right = Pop();
11246  HValue* left = Pop();
11247  Token::Value op = expr->op();
11248
11249  if (IsLiteralCompareStrict(isolate(), left, op, right)) {
11250    HCompareObjectEqAndBranch* result =
11251        New<HCompareObjectEqAndBranch>(left, right);
11252    return ast_context()->ReturnControl(result, expr->id());
11253  }
11254
11255  if (op == Token::INSTANCEOF) {
11256    // Check to see if the rhs of the instanceof is a known function.
11257    if (right->IsConstant() &&
11258        HConstant::cast(right)->handle(isolate())->IsJSFunction()) {
11259      Handle<JSFunction> function =
11260          Handle<JSFunction>::cast(HConstant::cast(right)->handle(isolate()));
11261      // Make sure that the {function} already has a meaningful initial map
11262      // (i.e. we constructed at least one instance using the constructor
11263      // {function}), and has an instance as .prototype.
11264      if (function->has_initial_map() &&
11265          !function->map()->has_non_instance_prototype()) {
11266        // Lookup @@hasInstance on the {function}.
11267        Handle<Map> function_map(function->map(), isolate());
11268        PropertyAccessInfo has_instance(
11269            this, LOAD, function_map,
11270            isolate()->factory()->has_instance_symbol());
11271        // Check if we are using the Function.prototype[@@hasInstance].
11272        if (has_instance.CanAccessMonomorphic() &&
11273            has_instance.IsDataConstant() &&
11274            has_instance.constant().is_identical_to(
11275                isolate()->function_has_instance())) {
11276          // Add appropriate receiver map check and prototype chain
11277          // checks to guard the @@hasInstance lookup chain.
11278          AddCheckMap(right, function_map);
11279          if (has_instance.has_holder()) {
11280            Handle<JSObject> prototype(
11281                JSObject::cast(has_instance.map()->prototype()), isolate());
11282            BuildCheckPrototypeMaps(prototype, has_instance.holder());
11283          }
11284          // Perform the prototype chain walk.
11285          Handle<Map> initial_map(function->initial_map(), isolate());
11286          top_info()->dependencies()->AssumeInitialMapCantChange(initial_map);
11287          HInstruction* prototype =
11288              Add<HConstant>(handle(initial_map->prototype(), isolate()));
11289          HHasInPrototypeChainAndBranch* result =
11290              New<HHasInPrototypeChainAndBranch>(left, prototype);
11291          return ast_context()->ReturnControl(result, expr->id());
11292        }
11293      }
11294    }
11295
11296    Callable callable = CodeFactory::InstanceOf(isolate());
11297    HValue* stub = Add<HConstant>(callable.code());
11298    HValue* values[] = {left, right};
11299    HCallWithDescriptor* result = New<HCallWithDescriptor>(
11300        stub, 0, callable.descriptor(), ArrayVector(values));
11301    result->set_type(HType::Boolean());
11302    return ast_context()->ReturnInstruction(result, expr->id());
11303
11304  } else if (op == Token::IN) {
11305    Callable callable = CodeFactory::HasProperty(isolate());
11306    HValue* stub = Add<HConstant>(callable.code());
11307    HValue* values[] = {left, right};
11308    HInstruction* result =
11309        New<HCallWithDescriptor>(stub, 0, callable.descriptor(),
11310                                 Vector<HValue*>(values, arraysize(values)));
11311    return ast_context()->ReturnInstruction(result, expr->id());
11312  }
11313
11314  PushBeforeSimulateBehavior push_behavior =
11315    ast_context()->IsEffect() ? NO_PUSH_BEFORE_SIMULATE
11316                              : PUSH_BEFORE_SIMULATE;
11317  HControlInstruction* compare = BuildCompareInstruction(
11318      op, left, right, left_type, right_type, combined_type,
11319      ScriptPositionToSourcePosition(expr->left()->position()),
11320      ScriptPositionToSourcePosition(expr->right()->position()),
11321      push_behavior, expr->id());
11322  if (compare == NULL) return;  // Bailed out.
11323  return ast_context()->ReturnControl(compare, expr->id());
11324}
11325
11326HControlInstruction* HOptimizedGraphBuilder::BuildCompareInstruction(
11327    Token::Value op, HValue* left, HValue* right, AstType* left_type,
11328    AstType* right_type, AstType* combined_type, SourcePosition left_position,
11329    SourcePosition right_position, PushBeforeSimulateBehavior push_sim_result,
11330    BailoutId bailout_id) {
11331  // Cases handled below depend on collected type feedback. They should
11332  // soft deoptimize when there is no type feedback.
11333  if (!combined_type->IsInhabited()) {
11334    Add<HDeoptimize>(
11335        DeoptimizeReason::
11336            kInsufficientTypeFeedbackForCombinedTypeOfBinaryOperation,
11337        Deoptimizer::SOFT);
11338    combined_type = left_type = right_type = AstType::Any();
11339  }
11340
11341  Representation left_rep = RepresentationFor(left_type);
11342  Representation right_rep = RepresentationFor(right_type);
11343  Representation combined_rep = RepresentationFor(combined_type);
11344
11345  if (combined_type->Is(AstType::Receiver())) {
11346    if (Token::IsEqualityOp(op)) {
11347      // HCompareObjectEqAndBranch can only deal with object, so
11348      // exclude numbers.
11349      if ((left->IsConstant() &&
11350           HConstant::cast(left)->HasNumberValue()) ||
11351          (right->IsConstant() &&
11352           HConstant::cast(right)->HasNumberValue())) {
11353        Add<HDeoptimize>(
11354            DeoptimizeReason::kTypeMismatchBetweenFeedbackAndConstant,
11355            Deoptimizer::SOFT);
11356        // The caller expects a branch instruction, so make it happy.
11357        return New<HBranch>(graph()->GetConstantTrue());
11358      }
11359      if (op == Token::EQ) {
11360        // For abstract equality we need to check both sides are receivers.
11361        if (combined_type->IsClass()) {
11362          Handle<Map> map = combined_type->AsClass()->Map();
11363          AddCheckMap(left, map);
11364          AddCheckMap(right, map);
11365        } else {
11366          BuildCheckHeapObject(left);
11367          Add<HCheckInstanceType>(left, HCheckInstanceType::IS_JS_RECEIVER);
11368          BuildCheckHeapObject(right);
11369          Add<HCheckInstanceType>(right, HCheckInstanceType::IS_JS_RECEIVER);
11370        }
11371      } else {
11372        // For strict equality we only need to check one side.
11373        HValue* operand_to_check =
11374            left->block()->block_id() < right->block()->block_id() ? left
11375                                                                   : right;
11376        if (combined_type->IsClass()) {
11377          Handle<Map> map = combined_type->AsClass()->Map();
11378          AddCheckMap(operand_to_check, map);
11379        } else {
11380          BuildCheckHeapObject(operand_to_check);
11381          Add<HCheckInstanceType>(operand_to_check,
11382                                  HCheckInstanceType::IS_JS_RECEIVER);
11383        }
11384      }
11385      HCompareObjectEqAndBranch* result =
11386          New<HCompareObjectEqAndBranch>(left, right);
11387      return result;
11388    } else {
11389      if (combined_type->IsClass()) {
11390        // TODO(bmeurer): This is an optimized version of an x < y, x > y,
11391        // x <= y or x >= y, where both x and y are spec objects with the
11392        // same map. The CompareIC collects this map for us. So if we know
11393        // that there's no @@toPrimitive on the map (including the prototype
11394        // chain), and both valueOf and toString are the default initial
11395        // implementations (on the %ObjectPrototype%), then we can reduce
11396        // the comparison to map checks on x and y, because the comparison
11397        // will turn into a comparison of "[object CLASS]" to itself (the
11398        // default outcome of toString, since valueOf returns a spec object).
11399        // This is pretty much adhoc, so in TurboFan we could do a lot better
11400        // and inline the interesting parts of ToPrimitive (actually we could
11401        // even do that in Crankshaft but we don't want to waste too much
11402        // time on this now).
11403        DCHECK(Token::IsOrderedRelationalCompareOp(op));
11404        Handle<Map> map = combined_type->AsClass()->Map();
11405        PropertyAccessInfo value_of(this, LOAD, map,
11406                                    isolate()->factory()->valueOf_string());
11407        PropertyAccessInfo to_primitive(
11408            this, LOAD, map, isolate()->factory()->to_primitive_symbol());
11409        PropertyAccessInfo to_string(this, LOAD, map,
11410                                     isolate()->factory()->toString_string());
11411        PropertyAccessInfo to_string_tag(
11412            this, LOAD, map, isolate()->factory()->to_string_tag_symbol());
11413        if (to_primitive.CanAccessMonomorphic() && !to_primitive.IsFound() &&
11414            to_string_tag.CanAccessMonomorphic() &&
11415            (!to_string_tag.IsFound() || to_string_tag.IsData() ||
11416             to_string_tag.IsDataConstant()) &&
11417            value_of.CanAccessMonomorphic() && value_of.IsDataConstant() &&
11418            value_of.constant().is_identical_to(isolate()->object_value_of()) &&
11419            to_string.CanAccessMonomorphic() && to_string.IsDataConstant() &&
11420            to_string.constant().is_identical_to(
11421                isolate()->object_to_string())) {
11422          // We depend on the prototype chain to stay the same, because we
11423          // also need to deoptimize when someone installs @@toPrimitive
11424          // or @@toStringTag somewhere in the prototype chain.
11425          Handle<Object> prototype(map->prototype(), isolate());
11426          if (prototype->IsJSObject()) {
11427            BuildCheckPrototypeMaps(Handle<JSObject>::cast(prototype),
11428                                    Handle<JSObject>::null());
11429          }
11430          AddCheckMap(left, map);
11431          AddCheckMap(right, map);
11432          // The caller expects a branch instruction, so make it happy.
11433          return New<HBranch>(
11434              graph()->GetConstantBool(op == Token::LTE || op == Token::GTE));
11435        }
11436      }
11437      Bailout(kUnsupportedNonPrimitiveCompare);
11438      return NULL;
11439    }
11440  } else if (combined_type->Is(AstType::InternalizedString()) &&
11441             Token::IsEqualityOp(op)) {
11442    // If we have a constant argument, it should be consistent with the type
11443    // feedback (otherwise we fail assertions in HCompareObjectEqAndBranch).
11444    if ((left->IsConstant() &&
11445         !HConstant::cast(left)->HasInternalizedStringValue()) ||
11446        (right->IsConstant() &&
11447         !HConstant::cast(right)->HasInternalizedStringValue())) {
11448      Add<HDeoptimize>(
11449          DeoptimizeReason::kTypeMismatchBetweenFeedbackAndConstant,
11450          Deoptimizer::SOFT);
11451      // The caller expects a branch instruction, so make it happy.
11452      return New<HBranch>(graph()->GetConstantTrue());
11453    }
11454    BuildCheckHeapObject(left);
11455    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_INTERNALIZED_STRING);
11456    BuildCheckHeapObject(right);
11457    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_INTERNALIZED_STRING);
11458    HCompareObjectEqAndBranch* result =
11459        New<HCompareObjectEqAndBranch>(left, right);
11460    return result;
11461  } else if (combined_type->Is(AstType::String())) {
11462    BuildCheckHeapObject(left);
11463    Add<HCheckInstanceType>(left, HCheckInstanceType::IS_STRING);
11464    BuildCheckHeapObject(right);
11465    Add<HCheckInstanceType>(right, HCheckInstanceType::IS_STRING);
11466    HStringCompareAndBranch* result =
11467        New<HStringCompareAndBranch>(left, right, op);
11468    return result;
11469  } else if (combined_type->Is(AstType::Boolean())) {
11470    AddCheckMap(left, isolate()->factory()->boolean_map());
11471    AddCheckMap(right, isolate()->factory()->boolean_map());
11472    if (Token::IsEqualityOp(op)) {
11473      HCompareObjectEqAndBranch* result =
11474          New<HCompareObjectEqAndBranch>(left, right);
11475      return result;
11476    }
11477    left = Add<HLoadNamedField>(
11478        left, nullptr,
11479        HObjectAccess::ForOddballToNumber(Representation::Smi()));
11480    right = Add<HLoadNamedField>(
11481        right, nullptr,
11482        HObjectAccess::ForOddballToNumber(Representation::Smi()));
11483    HCompareNumericAndBranch* result =
11484        New<HCompareNumericAndBranch>(left, right, op);
11485    return result;
11486  } else {
11487    if (op == Token::EQ) {
11488      if (left->IsConstant() &&
11489          HConstant::cast(left)->GetInstanceType() == ODDBALL_TYPE &&
11490          HConstant::cast(left)->IsUndetectable()) {
11491        return New<HIsUndetectableAndBranch>(right);
11492      }
11493
11494      if (right->IsConstant() &&
11495          HConstant::cast(right)->GetInstanceType() == ODDBALL_TYPE &&
11496          HConstant::cast(right)->IsUndetectable()) {
11497        return New<HIsUndetectableAndBranch>(left);
11498      }
11499    }
11500
11501    if (combined_rep.IsTagged() || combined_rep.IsNone()) {
11502      HCompareGeneric* result = Add<HCompareGeneric>(left, right, op);
11503      result->set_observed_input_representation(1, left_rep);
11504      result->set_observed_input_representation(2, right_rep);
11505      if (result->HasObservableSideEffects()) {
11506        if (push_sim_result == PUSH_BEFORE_SIMULATE) {
11507          Push(result);
11508          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
11509          Drop(1);
11510        } else {
11511          AddSimulate(bailout_id, REMOVABLE_SIMULATE);
11512        }
11513      }
11514      // TODO(jkummerow): Can we make this more efficient?
11515      HBranch* branch = New<HBranch>(result);
11516      return branch;
11517    } else {
11518      HCompareNumericAndBranch* result =
11519          New<HCompareNumericAndBranch>(left, right, op);
11520      result->set_observed_input_representation(left_rep, right_rep);
11521      return result;
11522    }
11523  }
11524}
11525
11526
11527void HOptimizedGraphBuilder::HandleLiteralCompareNil(CompareOperation* expr,
11528                                                     Expression* sub_expr,
11529                                                     NilValue nil) {
11530  DCHECK(!HasStackOverflow());
11531  DCHECK(current_block() != NULL);
11532  DCHECK(current_block()->HasPredecessor());
11533  DCHECK(expr->op() == Token::EQ || expr->op() == Token::EQ_STRICT);
11534  if (!is_tracking_positions()) SetSourcePosition(expr->position());
11535  CHECK_ALIVE(VisitForValue(sub_expr));
11536  HValue* value = Pop();
11537  HControlInstruction* instr;
11538  if (expr->op() == Token::EQ_STRICT) {
11539    HConstant* nil_constant = nil == kNullValue
11540        ? graph()->GetConstantNull()
11541        : graph()->GetConstantUndefined();
11542    instr = New<HCompareObjectEqAndBranch>(value, nil_constant);
11543  } else {
11544    DCHECK_EQ(Token::EQ, expr->op());
11545    instr = New<HIsUndetectableAndBranch>(value);
11546  }
11547  return ast_context()->ReturnControl(instr, expr->id());
11548}
11549
11550
11551void HOptimizedGraphBuilder::VisitSpread(Spread* expr) { UNREACHABLE(); }
11552
11553
11554void HOptimizedGraphBuilder::VisitEmptyParentheses(EmptyParentheses* expr) {
11555  UNREACHABLE();
11556}
11557
11558void HOptimizedGraphBuilder::VisitGetIterator(GetIterator* expr) {
11559  UNREACHABLE();
11560}
11561
11562HValue* HOptimizedGraphBuilder::AddThisFunction() {
11563  return AddInstruction(BuildThisFunction());
11564}
11565
11566
11567HInstruction* HOptimizedGraphBuilder::BuildThisFunction() {
11568  // If we share optimized code between different closures, the
11569  // this-function is not a constant, except inside an inlined body.
11570  if (function_state()->outer() != NULL) {
11571      return New<HConstant>(
11572          function_state()->compilation_info()->closure());
11573  } else {
11574      return New<HThisFunction>();
11575  }
11576}
11577
11578
11579HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
11580    Handle<JSObject> boilerplate_object,
11581    AllocationSiteUsageContext* site_context) {
11582  NoObservableSideEffectsScope no_effects(this);
11583  Handle<Map> initial_map(boilerplate_object->map());
11584  InstanceType instance_type = initial_map->instance_type();
11585  DCHECK(instance_type == JS_ARRAY_TYPE || instance_type == JS_OBJECT_TYPE);
11586
11587  HType type = instance_type == JS_ARRAY_TYPE
11588      ? HType::JSArray() : HType::JSObject();
11589  HValue* object_size_constant = Add<HConstant>(initial_map->instance_size());
11590
11591  PretenureFlag pretenure_flag = NOT_TENURED;
11592  Handle<AllocationSite> top_site(*site_context->top(), isolate());
11593  if (FLAG_allocation_site_pretenuring) {
11594    pretenure_flag = top_site->GetPretenureMode();
11595  }
11596
11597  Handle<AllocationSite> current_site(*site_context->current(), isolate());
11598  if (*top_site == *current_site) {
11599    // We install a dependency for pretenuring only on the outermost literal.
11600    top_info()->dependencies()->AssumeTenuringDecision(top_site);
11601  }
11602  top_info()->dependencies()->AssumeTransitionStable(current_site);
11603
11604  HInstruction* object =
11605      Add<HAllocate>(object_size_constant, type, pretenure_flag, instance_type,
11606                     graph()->GetConstant0(), top_site);
11607
11608  // If allocation folding reaches kMaxRegularHeapObjectSize the
11609  // elements array may not get folded into the object. Hence, we set the
11610  // elements pointer to empty fixed array and let store elimination remove
11611  // this store in the folding case.
11612  HConstant* empty_fixed_array = Add<HConstant>(
11613      isolate()->factory()->empty_fixed_array());
11614  Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11615      empty_fixed_array);
11616
11617  BuildEmitObjectHeader(boilerplate_object, object);
11618
11619  // Similarly to the elements pointer, there is no guarantee that all
11620  // property allocations can get folded, so pre-initialize all in-object
11621  // properties to a safe value.
11622  BuildInitializeInobjectProperties(object, initial_map);
11623
11624  Handle<FixedArrayBase> elements(boilerplate_object->elements());
11625  int elements_size = (elements->length() > 0 &&
11626      elements->map() != isolate()->heap()->fixed_cow_array_map()) ?
11627          elements->Size() : 0;
11628
11629  if (pretenure_flag == TENURED &&
11630      elements->map() == isolate()->heap()->fixed_cow_array_map() &&
11631      isolate()->heap()->InNewSpace(*elements)) {
11632    // If we would like to pretenure a fixed cow array, we must ensure that the
11633    // array is already in old space, otherwise we'll create too many old-to-
11634    // new-space pointers (overflowing the store buffer).
11635    elements = Handle<FixedArrayBase>(
11636        isolate()->factory()->CopyAndTenureFixedCOWArray(
11637            Handle<FixedArray>::cast(elements)));
11638    boilerplate_object->set_elements(*elements);
11639  }
11640
11641  HInstruction* object_elements = NULL;
11642  if (elements_size > 0) {
11643    HValue* object_elements_size = Add<HConstant>(elements_size);
11644    InstanceType instance_type = boilerplate_object->HasFastDoubleElements()
11645        ? FIXED_DOUBLE_ARRAY_TYPE : FIXED_ARRAY_TYPE;
11646    object_elements = Add<HAllocate>(object_elements_size, HType::HeapObject(),
11647                                     pretenure_flag, instance_type,
11648                                     graph()->GetConstant0(), top_site);
11649    BuildEmitElements(boilerplate_object, elements, object_elements,
11650                      site_context);
11651    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11652                          object_elements);
11653  } else {
11654    Handle<Object> elements_field =
11655        Handle<Object>(boilerplate_object->elements(), isolate());
11656    HInstruction* object_elements_cow = Add<HConstant>(elements_field);
11657    Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
11658                          object_elements_cow);
11659  }
11660
11661  // Copy in-object properties.
11662  if (initial_map->NumberOfFields() != 0 ||
11663      initial_map->unused_property_fields() > 0) {
11664    BuildEmitInObjectProperties(boilerplate_object, object, site_context,
11665                                pretenure_flag);
11666  }
11667  return object;
11668}
11669
11670
11671void HOptimizedGraphBuilder::BuildEmitObjectHeader(
11672    Handle<JSObject> boilerplate_object,
11673    HInstruction* object) {
11674  DCHECK(boilerplate_object->properties()->length() == 0);
11675
11676  Handle<Map> boilerplate_object_map(boilerplate_object->map());
11677  AddStoreMapConstant(object, boilerplate_object_map);
11678
11679  Handle<Object> properties_field =
11680      Handle<Object>(boilerplate_object->properties(), isolate());
11681  DCHECK(*properties_field == isolate()->heap()->empty_fixed_array());
11682  HInstruction* properties = Add<HConstant>(properties_field);
11683  HObjectAccess access = HObjectAccess::ForPropertiesPointer();
11684  Add<HStoreNamedField>(object, access, properties);
11685
11686  if (boilerplate_object->IsJSArray()) {
11687    Handle<JSArray> boilerplate_array =
11688        Handle<JSArray>::cast(boilerplate_object);
11689    Handle<Object> length_field =
11690        Handle<Object>(boilerplate_array->length(), isolate());
11691    HInstruction* length = Add<HConstant>(length_field);
11692
11693    DCHECK(boilerplate_array->length()->IsSmi());
11694    Add<HStoreNamedField>(object, HObjectAccess::ForArrayLength(
11695        boilerplate_array->GetElementsKind()), length);
11696  }
11697}
11698
11699
11700void HOptimizedGraphBuilder::BuildEmitInObjectProperties(
11701    Handle<JSObject> boilerplate_object,
11702    HInstruction* object,
11703    AllocationSiteUsageContext* site_context,
11704    PretenureFlag pretenure_flag) {
11705  Handle<Map> boilerplate_map(boilerplate_object->map());
11706  Handle<DescriptorArray> descriptors(boilerplate_map->instance_descriptors());
11707  int limit = boilerplate_map->NumberOfOwnDescriptors();
11708
11709  int copied_fields = 0;
11710  for (int i = 0; i < limit; i++) {
11711    PropertyDetails details = descriptors->GetDetails(i);
11712    if (details.location() != kField) continue;
11713    DCHECK_EQ(kData, details.kind());
11714    copied_fields++;
11715    FieldIndex field_index = FieldIndex::ForDescriptor(*boilerplate_map, i);
11716
11717
11718    int property_offset = field_index.offset();
11719    Handle<Name> name(descriptors->GetKey(i));
11720
11721    // The access for the store depends on the type of the boilerplate.
11722    HObjectAccess access = boilerplate_object->IsJSArray() ?
11723        HObjectAccess::ForJSArrayOffset(property_offset) :
11724        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11725
11726    if (boilerplate_object->IsUnboxedDoubleField(field_index)) {
11727      CHECK(!boilerplate_object->IsJSArray());
11728      double value = boilerplate_object->RawFastDoublePropertyAt(field_index);
11729      access = access.WithRepresentation(Representation::Double());
11730      Add<HStoreNamedField>(object, access, Add<HConstant>(value));
11731      continue;
11732    }
11733    Handle<Object> value(boilerplate_object->RawFastPropertyAt(field_index),
11734                         isolate());
11735
11736    if (value->IsJSObject()) {
11737      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11738      Handle<AllocationSite> current_site = site_context->EnterNewScope();
11739      HInstruction* result =
11740          BuildFastLiteral(value_object, site_context);
11741      site_context->ExitScope(current_site, value_object);
11742      Add<HStoreNamedField>(object, access, result);
11743    } else {
11744      Representation representation = details.representation();
11745      HInstruction* value_instruction;
11746
11747      if (representation.IsDouble()) {
11748        // Allocate a HeapNumber box and store the value into it.
11749        HValue* heap_number_constant = Add<HConstant>(HeapNumber::kSize);
11750        HInstruction* double_box = Add<HAllocate>(
11751            heap_number_constant, HType::HeapObject(), pretenure_flag,
11752            MUTABLE_HEAP_NUMBER_TYPE, graph()->GetConstant0());
11753        AddStoreMapConstant(double_box,
11754            isolate()->factory()->mutable_heap_number_map());
11755        // Unwrap the mutable heap number from the boilerplate.
11756        HValue* double_value =
11757            Add<HConstant>(Handle<HeapNumber>::cast(value)->value());
11758        Add<HStoreNamedField>(
11759            double_box, HObjectAccess::ForHeapNumberValue(), double_value);
11760        value_instruction = double_box;
11761      } else if (representation.IsSmi()) {
11762        value_instruction = value->IsUninitialized(isolate())
11763                                ? graph()->GetConstant0()
11764                                : Add<HConstant>(value);
11765        // Ensure that value is stored as smi.
11766        access = access.WithRepresentation(representation);
11767      } else {
11768        value_instruction = Add<HConstant>(value);
11769      }
11770
11771      Add<HStoreNamedField>(object, access, value_instruction);
11772    }
11773  }
11774
11775  int inobject_properties = boilerplate_object->map()->GetInObjectProperties();
11776  HInstruction* value_instruction =
11777      Add<HConstant>(isolate()->factory()->one_pointer_filler_map());
11778  for (int i = copied_fields; i < inobject_properties; i++) {
11779    DCHECK(boilerplate_object->IsJSObject());
11780    int property_offset = boilerplate_object->GetInObjectPropertyOffset(i);
11781    HObjectAccess access =
11782        HObjectAccess::ForMapAndOffset(boilerplate_map, property_offset);
11783    Add<HStoreNamedField>(object, access, value_instruction);
11784  }
11785}
11786
11787
11788void HOptimizedGraphBuilder::BuildEmitElements(
11789    Handle<JSObject> boilerplate_object,
11790    Handle<FixedArrayBase> elements,
11791    HValue* object_elements,
11792    AllocationSiteUsageContext* site_context) {
11793  ElementsKind kind = boilerplate_object->map()->elements_kind();
11794  int elements_length = elements->length();
11795  HValue* object_elements_length = Add<HConstant>(elements_length);
11796  BuildInitializeElementsHeader(object_elements, kind, object_elements_length);
11797
11798  // Copy elements backing store content.
11799  if (elements->IsFixedDoubleArray()) {
11800    BuildEmitFixedDoubleArray(elements, kind, object_elements);
11801  } else if (elements->IsFixedArray()) {
11802    BuildEmitFixedArray(elements, kind, object_elements,
11803                        site_context);
11804  } else {
11805    UNREACHABLE();
11806  }
11807}
11808
11809
11810void HOptimizedGraphBuilder::BuildEmitFixedDoubleArray(
11811    Handle<FixedArrayBase> elements,
11812    ElementsKind kind,
11813    HValue* object_elements) {
11814  HInstruction* boilerplate_elements = Add<HConstant>(elements);
11815  int elements_length = elements->length();
11816  for (int i = 0; i < elements_length; i++) {
11817    HValue* key_constant = Add<HConstant>(i);
11818    HInstruction* value_instruction =
11819        Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
11820                        kind, ALLOW_RETURN_HOLE);
11821    HInstruction* store = Add<HStoreKeyed>(object_elements, key_constant,
11822                                           value_instruction, nullptr, kind);
11823    store->SetFlag(HValue::kTruncatingToNumber);
11824  }
11825}
11826
11827
11828void HOptimizedGraphBuilder::BuildEmitFixedArray(
11829    Handle<FixedArrayBase> elements,
11830    ElementsKind kind,
11831    HValue* object_elements,
11832    AllocationSiteUsageContext* site_context) {
11833  HInstruction* boilerplate_elements = Add<HConstant>(elements);
11834  int elements_length = elements->length();
11835  Handle<FixedArray> fast_elements = Handle<FixedArray>::cast(elements);
11836  for (int i = 0; i < elements_length; i++) {
11837    Handle<Object> value(fast_elements->get(i), isolate());
11838    HValue* key_constant = Add<HConstant>(i);
11839    if (value->IsJSObject()) {
11840      Handle<JSObject> value_object = Handle<JSObject>::cast(value);
11841      Handle<AllocationSite> current_site = site_context->EnterNewScope();
11842      HInstruction* result =
11843          BuildFastLiteral(value_object, site_context);
11844      site_context->ExitScope(current_site, value_object);
11845      Add<HStoreKeyed>(object_elements, key_constant, result, nullptr, kind);
11846    } else {
11847      ElementsKind copy_kind =
11848          kind == FAST_HOLEY_SMI_ELEMENTS ? FAST_HOLEY_ELEMENTS : kind;
11849      HInstruction* value_instruction =
11850          Add<HLoadKeyed>(boilerplate_elements, key_constant, nullptr, nullptr,
11851                          copy_kind, ALLOW_RETURN_HOLE);
11852      Add<HStoreKeyed>(object_elements, key_constant, value_instruction,
11853                       nullptr, copy_kind);
11854    }
11855  }
11856}
11857
11858
11859void HOptimizedGraphBuilder::VisitThisFunction(ThisFunction* expr) {
11860  DCHECK(!HasStackOverflow());
11861  DCHECK(current_block() != NULL);
11862  DCHECK(current_block()->HasPredecessor());
11863  HInstruction* instr = BuildThisFunction();
11864  return ast_context()->ReturnInstruction(instr, expr->id());
11865}
11866
11867
11868void HOptimizedGraphBuilder::VisitSuperPropertyReference(
11869    SuperPropertyReference* expr) {
11870  DCHECK(!HasStackOverflow());
11871  DCHECK(current_block() != NULL);
11872  DCHECK(current_block()->HasPredecessor());
11873  return Bailout(kSuperReference);
11874}
11875
11876
11877void HOptimizedGraphBuilder::VisitSuperCallReference(SuperCallReference* expr) {
11878  DCHECK(!HasStackOverflow());
11879  DCHECK(current_block() != NULL);
11880  DCHECK(current_block()->HasPredecessor());
11881  return Bailout(kSuperReference);
11882}
11883
11884void HOptimizedGraphBuilder::VisitDeclarations(
11885    Declaration::List* declarations) {
11886  DCHECK(globals_.is_empty());
11887  AstVisitor<HOptimizedGraphBuilder>::VisitDeclarations(declarations);
11888  if (!globals_.is_empty()) {
11889    Handle<FixedArray> array =
11890       isolate()->factory()->NewFixedArray(globals_.length(), TENURED);
11891    for (int i = 0; i < globals_.length(); ++i) array->set(i, *globals_.at(i));
11892    int flags = current_info()->GetDeclareGlobalsFlags();
11893    Handle<FeedbackVector> vector(current_feedback_vector(), isolate());
11894    Add<HDeclareGlobals>(array, flags, vector);
11895    globals_.Rewind(0);
11896  }
11897}
11898
11899
11900void HOptimizedGraphBuilder::VisitVariableDeclaration(
11901    VariableDeclaration* declaration) {
11902  VariableProxy* proxy = declaration->proxy();
11903  Variable* variable = proxy->var();
11904  switch (variable->location()) {
11905    case VariableLocation::UNALLOCATED: {
11906      DCHECK(!variable->binding_needs_init());
11907      globals_.Add(variable->name(), zone());
11908      FeedbackSlot slot = proxy->VariableFeedbackSlot();
11909      DCHECK(!slot.IsInvalid());
11910      globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
11911      globals_.Add(isolate()->factory()->undefined_value(), zone());
11912      globals_.Add(isolate()->factory()->undefined_value(), zone());
11913      return;
11914    }
11915    case VariableLocation::PARAMETER:
11916    case VariableLocation::LOCAL:
11917      if (variable->binding_needs_init()) {
11918        HValue* value = graph()->GetConstantHole();
11919        environment()->Bind(variable, value);
11920      }
11921      break;
11922    case VariableLocation::CONTEXT:
11923      if (variable->binding_needs_init()) {
11924        HValue* value = graph()->GetConstantHole();
11925        HValue* context = environment()->context();
11926        HStoreContextSlot* store = Add<HStoreContextSlot>(
11927            context, variable->index(), HStoreContextSlot::kNoCheck, value);
11928        if (store->HasObservableSideEffects()) {
11929          Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11930        }
11931      }
11932      break;
11933    case VariableLocation::LOOKUP:
11934      return Bailout(kUnsupportedLookupSlotInDeclaration);
11935    case VariableLocation::MODULE:
11936      UNREACHABLE();
11937  }
11938}
11939
11940
11941void HOptimizedGraphBuilder::VisitFunctionDeclaration(
11942    FunctionDeclaration* declaration) {
11943  VariableProxy* proxy = declaration->proxy();
11944  Variable* variable = proxy->var();
11945  switch (variable->location()) {
11946    case VariableLocation::UNALLOCATED: {
11947      globals_.Add(variable->name(), zone());
11948      FeedbackSlot slot = proxy->VariableFeedbackSlot();
11949      DCHECK(!slot.IsInvalid());
11950      globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
11951
11952      // We need the slot where the literals array lives, too.
11953      slot = declaration->fun()->LiteralFeedbackSlot();
11954      DCHECK(!slot.IsInvalid());
11955      globals_.Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
11956
11957      Handle<SharedFunctionInfo> function = Compiler::GetSharedFunctionInfo(
11958          declaration->fun(), current_info()->script(), top_info());
11959      // Check for stack-overflow exception.
11960      if (function.is_null()) return SetStackOverflow();
11961      globals_.Add(function, zone());
11962      return;
11963    }
11964    case VariableLocation::PARAMETER:
11965    case VariableLocation::LOCAL: {
11966      CHECK_ALIVE(VisitForValue(declaration->fun()));
11967      HValue* value = Pop();
11968      BindIfLive(variable, value);
11969      break;
11970    }
11971    case VariableLocation::CONTEXT: {
11972      CHECK_ALIVE(VisitForValue(declaration->fun()));
11973      HValue* value = Pop();
11974      HValue* context = environment()->context();
11975      HStoreContextSlot* store = Add<HStoreContextSlot>(
11976          context, variable->index(), HStoreContextSlot::kNoCheck, value);
11977      if (store->HasObservableSideEffects()) {
11978        Add<HSimulate>(proxy->id(), REMOVABLE_SIMULATE);
11979      }
11980      break;
11981    }
11982    case VariableLocation::LOOKUP:
11983      return Bailout(kUnsupportedLookupSlotInDeclaration);
11984    case VariableLocation::MODULE:
11985      UNREACHABLE();
11986  }
11987}
11988
11989
11990void HOptimizedGraphBuilder::VisitRewritableExpression(
11991    RewritableExpression* node) {
11992  CHECK_ALIVE(Visit(node->expression()));
11993}
11994
11995
11996// Generators for inline runtime functions.
11997// Support for types.
11998void HOptimizedGraphBuilder::GenerateIsSmi(CallRuntime* call) {
11999  DCHECK(call->arguments()->length() == 1);
12000  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12001  HValue* value = Pop();
12002  HIsSmiAndBranch* result = New<HIsSmiAndBranch>(value);
12003  return ast_context()->ReturnControl(result, call->id());
12004}
12005
12006
12007void HOptimizedGraphBuilder::GenerateIsJSReceiver(CallRuntime* call) {
12008  DCHECK(call->arguments()->length() == 1);
12009  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12010  HValue* value = Pop();
12011  HHasInstanceTypeAndBranch* result =
12012      New<HHasInstanceTypeAndBranch>(value,
12013                                     FIRST_JS_RECEIVER_TYPE,
12014                                     LAST_JS_RECEIVER_TYPE);
12015  return ast_context()->ReturnControl(result, call->id());
12016}
12017
12018void HOptimizedGraphBuilder::GenerateIsArray(CallRuntime* call) {
12019  DCHECK(call->arguments()->length() == 1);
12020  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12021  HValue* value = Pop();
12022  HHasInstanceTypeAndBranch* result =
12023      New<HHasInstanceTypeAndBranch>(value, JS_ARRAY_TYPE);
12024  return ast_context()->ReturnControl(result, call->id());
12025}
12026
12027
12028void HOptimizedGraphBuilder::GenerateIsTypedArray(CallRuntime* call) {
12029  DCHECK(call->arguments()->length() == 1);
12030  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12031  HValue* value = Pop();
12032  HHasInstanceTypeAndBranch* result =
12033      New<HHasInstanceTypeAndBranch>(value, JS_TYPED_ARRAY_TYPE);
12034  return ast_context()->ReturnControl(result, call->id());
12035}
12036
12037
12038void HOptimizedGraphBuilder::GenerateToInteger(CallRuntime* call) {
12039  DCHECK_EQ(1, call->arguments()->length());
12040  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12041  HValue* input = Pop();
12042  if (input->type().IsSmi()) {
12043    return ast_context()->ReturnValue(input);
12044  } else {
12045    Callable callable = CodeFactory::ToInteger(isolate());
12046    HValue* stub = Add<HConstant>(callable.code());
12047    HValue* values[] = {input};
12048    HInstruction* result = New<HCallWithDescriptor>(
12049        stub, 0, callable.descriptor(), ArrayVector(values));
12050    return ast_context()->ReturnInstruction(result, call->id());
12051  }
12052}
12053
12054
12055void HOptimizedGraphBuilder::GenerateToObject(CallRuntime* call) {
12056  DCHECK_EQ(1, call->arguments()->length());
12057  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12058  HValue* value = Pop();
12059  HValue* result = BuildToObject(value);
12060  return ast_context()->ReturnValue(result);
12061}
12062
12063
12064void HOptimizedGraphBuilder::GenerateToString(CallRuntime* call) {
12065  DCHECK_EQ(1, call->arguments()->length());
12066  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12067  HValue* input = Pop();
12068  if (input->type().IsString()) {
12069    return ast_context()->ReturnValue(input);
12070  } else {
12071    Callable callable = CodeFactory::ToString(isolate());
12072    HValue* stub = Add<HConstant>(callable.code());
12073    HValue* values[] = {input};
12074    HInstruction* result = New<HCallWithDescriptor>(
12075        stub, 0, callable.descriptor(), ArrayVector(values));
12076    return ast_context()->ReturnInstruction(result, call->id());
12077  }
12078}
12079
12080
12081void HOptimizedGraphBuilder::GenerateToLength(CallRuntime* call) {
12082  DCHECK_EQ(1, call->arguments()->length());
12083  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12084  Callable callable = CodeFactory::ToLength(isolate());
12085  HValue* input = Pop();
12086  HValue* stub = Add<HConstant>(callable.code());
12087  HValue* values[] = {input};
12088  HInstruction* result = New<HCallWithDescriptor>(
12089      stub, 0, callable.descriptor(), ArrayVector(values));
12090  return ast_context()->ReturnInstruction(result, call->id());
12091}
12092
12093
12094void HOptimizedGraphBuilder::GenerateToNumber(CallRuntime* call) {
12095  DCHECK_EQ(1, call->arguments()->length());
12096  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12097  Callable callable = CodeFactory::ToNumber(isolate());
12098  HValue* input = Pop();
12099  HValue* result = BuildToNumber(input);
12100  if (result->HasObservableSideEffects()) {
12101    if (!ast_context()->IsEffect()) Push(result);
12102    Add<HSimulate>(call->id(), REMOVABLE_SIMULATE);
12103    if (!ast_context()->IsEffect()) result = Pop();
12104  }
12105  return ast_context()->ReturnValue(result);
12106}
12107
12108
12109void HOptimizedGraphBuilder::GenerateIsJSProxy(CallRuntime* call) {
12110  DCHECK(call->arguments()->length() == 1);
12111  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12112  HValue* value = Pop();
12113  HIfContinuation continuation;
12114  IfBuilder if_proxy(this);
12115
12116  HValue* smicheck = if_proxy.IfNot<HIsSmiAndBranch>(value);
12117  if_proxy.And();
12118  HValue* map = Add<HLoadNamedField>(value, smicheck, HObjectAccess::ForMap());
12119  HValue* instance_type =
12120      Add<HLoadNamedField>(map, nullptr, HObjectAccess::ForMapInstanceType());
12121  if_proxy.If<HCompareNumericAndBranch>(
12122      instance_type, Add<HConstant>(JS_PROXY_TYPE), Token::EQ);
12123
12124  if_proxy.CaptureContinuation(&continuation);
12125  return ast_context()->ReturnContinuation(&continuation, call->id());
12126}
12127
12128
12129void HOptimizedGraphBuilder::GenerateHasFastPackedElements(CallRuntime* call) {
12130  DCHECK(call->arguments()->length() == 1);
12131  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12132  HValue* object = Pop();
12133  HIfContinuation continuation(graph()->CreateBasicBlock(),
12134                               graph()->CreateBasicBlock());
12135  IfBuilder if_not_smi(this);
12136  if_not_smi.IfNot<HIsSmiAndBranch>(object);
12137  if_not_smi.Then();
12138  {
12139    NoObservableSideEffectsScope no_effects(this);
12140
12141    IfBuilder if_fast_packed(this);
12142    HValue* elements_kind = BuildGetElementsKind(object);
12143    if_fast_packed.If<HCompareNumericAndBranch>(
12144        elements_kind, Add<HConstant>(FAST_SMI_ELEMENTS), Token::EQ);
12145    if_fast_packed.Or();
12146    if_fast_packed.If<HCompareNumericAndBranch>(
12147        elements_kind, Add<HConstant>(FAST_ELEMENTS), Token::EQ);
12148    if_fast_packed.Or();
12149    if_fast_packed.If<HCompareNumericAndBranch>(
12150        elements_kind, Add<HConstant>(FAST_DOUBLE_ELEMENTS), Token::EQ);
12151    if_fast_packed.JoinContinuation(&continuation);
12152  }
12153  if_not_smi.JoinContinuation(&continuation);
12154  return ast_context()->ReturnContinuation(&continuation, call->id());
12155}
12156
12157
12158// Fast support for charCodeAt(n).
12159void HOptimizedGraphBuilder::GenerateStringCharCodeAt(CallRuntime* call) {
12160  DCHECK(call->arguments()->length() == 2);
12161  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12162  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12163  HValue* index = Pop();
12164  HValue* string = Pop();
12165  HInstruction* result = BuildStringCharCodeAt(string, index);
12166  return ast_context()->ReturnInstruction(result, call->id());
12167}
12168
12169
12170// Fast support for SubString.
12171void HOptimizedGraphBuilder::GenerateSubString(CallRuntime* call) {
12172  DCHECK_EQ(3, call->arguments()->length());
12173  CHECK_ALIVE(VisitExpressions(call->arguments()));
12174  Callable callable = CodeFactory::SubString(isolate());
12175  HValue* stub = Add<HConstant>(callable.code());
12176  HValue* to = Pop();
12177  HValue* from = Pop();
12178  HValue* string = Pop();
12179  HValue* values[] = {string, from, to};
12180  HInstruction* result = New<HCallWithDescriptor>(
12181      stub, 0, callable.descriptor(), ArrayVector(values));
12182  result->set_type(HType::String());
12183  return ast_context()->ReturnInstruction(result, call->id());
12184}
12185
12186
12187// Fast support for calls.
12188void HOptimizedGraphBuilder::GenerateCall(CallRuntime* call) {
12189  DCHECK_LE(2, call->arguments()->length());
12190  CHECK_ALIVE(VisitExpressions(call->arguments()));
12191  CallTrampolineDescriptor descriptor(isolate());
12192  PushArgumentsFromEnvironment(call->arguments()->length() - 1);
12193  HValue* trampoline = Add<HConstant>(isolate()->builtins()->Call());
12194  HValue* target = Pop();
12195  HValue* values[] = {target, Add<HConstant>(call->arguments()->length() - 2)};
12196  HInstruction* result =
12197      New<HCallWithDescriptor>(trampoline, call->arguments()->length() - 1,
12198                               descriptor, ArrayVector(values));
12199  return ast_context()->ReturnInstruction(result, call->id());
12200}
12201
12202
12203void HOptimizedGraphBuilder::GenerateFixedArrayGet(CallRuntime* call) {
12204  DCHECK(call->arguments()->length() == 2);
12205  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12206  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12207  HValue* index = Pop();
12208  HValue* object = Pop();
12209  HInstruction* result = New<HLoadKeyed>(
12210      object, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);
12211  return ast_context()->ReturnInstruction(result, call->id());
12212}
12213
12214
12215void HOptimizedGraphBuilder::GenerateFixedArraySet(CallRuntime* call) {
12216  DCHECK(call->arguments()->length() == 3);
12217  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12218  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12219  CHECK_ALIVE(VisitForValue(call->arguments()->at(2)));
12220  HValue* value = Pop();
12221  HValue* index = Pop();
12222  HValue* object = Pop();
12223  NoObservableSideEffectsScope no_effects(this);
12224  Add<HStoreKeyed>(object, index, value, nullptr, FAST_HOLEY_ELEMENTS);
12225  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12226}
12227
12228
12229void HOptimizedGraphBuilder::GenerateTheHole(CallRuntime* call) {
12230  DCHECK(call->arguments()->length() == 0);
12231  return ast_context()->ReturnValue(graph()->GetConstantHole());
12232}
12233
12234
12235void HOptimizedGraphBuilder::GenerateCreateIterResultObject(CallRuntime* call) {
12236  DCHECK_EQ(2, call->arguments()->length());
12237  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12238  CHECK_ALIVE(VisitForValue(call->arguments()->at(1)));
12239  HValue* done = Pop();
12240  HValue* value = Pop();
12241  HValue* result = BuildCreateIterResultObject(value, done);
12242  return ast_context()->ReturnValue(result);
12243}
12244
12245
12246void HOptimizedGraphBuilder::GenerateJSCollectionGetTable(CallRuntime* call) {
12247  DCHECK(call->arguments()->length() == 1);
12248  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12249  HValue* receiver = Pop();
12250  HInstruction* result = New<HLoadNamedField>(
12251      receiver, nullptr, HObjectAccess::ForJSCollectionTable());
12252  return ast_context()->ReturnInstruction(result, call->id());
12253}
12254
12255
12256void HOptimizedGraphBuilder::GenerateStringGetRawHashField(CallRuntime* call) {
12257  DCHECK(call->arguments()->length() == 1);
12258  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12259  HValue* object = Pop();
12260  HInstruction* result = New<HLoadNamedField>(
12261      object, nullptr, HObjectAccess::ForStringHashField());
12262  return ast_context()->ReturnInstruction(result, call->id());
12263}
12264
12265
12266template <typename CollectionType>
12267HValue* HOptimizedGraphBuilder::BuildAllocateOrderedHashTable() {
12268  static const int kCapacity = CollectionType::kMinCapacity;
12269  static const int kBucketCount = kCapacity / CollectionType::kLoadFactor;
12270  static const int kFixedArrayLength = CollectionType::kHashTableStartIndex +
12271                                       kBucketCount +
12272                                       (kCapacity * CollectionType::kEntrySize);
12273  static const int kSizeInBytes =
12274      FixedArray::kHeaderSize + (kFixedArrayLength * kPointerSize);
12275
12276  // Allocate the table and add the proper map.
12277  HValue* table =
12278      Add<HAllocate>(Add<HConstant>(kSizeInBytes), HType::HeapObject(),
12279                     NOT_TENURED, FIXED_ARRAY_TYPE, graph()->GetConstant0());
12280  AddStoreMapConstant(table, isolate()->factory()->ordered_hash_table_map());
12281
12282  // Initialize the FixedArray...
12283  HValue* length = Add<HConstant>(kFixedArrayLength);
12284  Add<HStoreNamedField>(table, HObjectAccess::ForFixedArrayLength(), length);
12285
12286  // ...and the OrderedHashTable fields.
12287  Add<HStoreNamedField>(
12288      table,
12289      HObjectAccess::ForOrderedHashTableNumberOfBuckets<CollectionType>(),
12290      Add<HConstant>(kBucketCount));
12291  Add<HStoreNamedField>(
12292      table,
12293      HObjectAccess::ForOrderedHashTableNumberOfElements<CollectionType>(),
12294      graph()->GetConstant0());
12295  Add<HStoreNamedField>(
12296      table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
12297                 CollectionType>(),
12298      graph()->GetConstant0());
12299
12300  // Fill the buckets with kNotFound.
12301  HValue* not_found = Add<HConstant>(CollectionType::kNotFound);
12302  for (int i = 0; i < kBucketCount; ++i) {
12303    Add<HStoreNamedField>(
12304        table, HObjectAccess::ForOrderedHashTableBucket<CollectionType>(i),
12305        not_found);
12306  }
12307
12308  // Fill the data table with undefined.
12309  HValue* undefined = graph()->GetConstantUndefined();
12310  for (int i = 0; i < (kCapacity * CollectionType::kEntrySize); ++i) {
12311    Add<HStoreNamedField>(table,
12312                          HObjectAccess::ForOrderedHashTableDataTableIndex<
12313                              CollectionType, kBucketCount>(i),
12314                          undefined);
12315  }
12316
12317  return table;
12318}
12319
12320
12321void HOptimizedGraphBuilder::GenerateSetInitialize(CallRuntime* call) {
12322  DCHECK(call->arguments()->length() == 1);
12323  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12324  HValue* receiver = Pop();
12325
12326  NoObservableSideEffectsScope no_effects(this);
12327  HValue* table = BuildAllocateOrderedHashTable<OrderedHashSet>();
12328  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12329  return ast_context()->ReturnValue(receiver);
12330}
12331
12332
12333void HOptimizedGraphBuilder::GenerateMapInitialize(CallRuntime* call) {
12334  DCHECK(call->arguments()->length() == 1);
12335  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12336  HValue* receiver = Pop();
12337
12338  NoObservableSideEffectsScope no_effects(this);
12339  HValue* table = BuildAllocateOrderedHashTable<OrderedHashMap>();
12340  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(), table);
12341  return ast_context()->ReturnValue(receiver);
12342}
12343
12344
12345template <typename CollectionType>
12346void HOptimizedGraphBuilder::BuildOrderedHashTableClear(HValue* receiver) {
12347  HValue* old_table = Add<HLoadNamedField>(
12348      receiver, nullptr, HObjectAccess::ForJSCollectionTable());
12349  HValue* new_table = BuildAllocateOrderedHashTable<CollectionType>();
12350  Add<HStoreNamedField>(
12351      old_table, HObjectAccess::ForOrderedHashTableNextTable<CollectionType>(),
12352      new_table);
12353  Add<HStoreNamedField>(
12354      old_table, HObjectAccess::ForOrderedHashTableNumberOfDeletedElements<
12355                     CollectionType>(),
12356      Add<HConstant>(CollectionType::kClearedTableSentinel));
12357  Add<HStoreNamedField>(receiver, HObjectAccess::ForJSCollectionTable(),
12358                        new_table);
12359}
12360
12361
12362void HOptimizedGraphBuilder::GenerateSetClear(CallRuntime* call) {
12363  DCHECK(call->arguments()->length() == 1);
12364  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12365  HValue* receiver = Pop();
12366
12367  NoObservableSideEffectsScope no_effects(this);
12368  BuildOrderedHashTableClear<OrderedHashSet>(receiver);
12369  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12370}
12371
12372
12373void HOptimizedGraphBuilder::GenerateMapClear(CallRuntime* call) {
12374  DCHECK(call->arguments()->length() == 1);
12375  CHECK_ALIVE(VisitForValue(call->arguments()->at(0)));
12376  HValue* receiver = Pop();
12377
12378  NoObservableSideEffectsScope no_effects(this);
12379  BuildOrderedHashTableClear<OrderedHashMap>(receiver);
12380  return ast_context()->ReturnValue(graph()->GetConstantUndefined());
12381}
12382
12383void HOptimizedGraphBuilder::GenerateDebugBreakInOptimizedCode(
12384    CallRuntime* call) {
12385  Add<HDebugBreak>();
12386  return ast_context()->ReturnValue(graph()->GetConstant0());
12387}
12388
12389
12390void HOptimizedGraphBuilder::GenerateDebugIsActive(CallRuntime* call) {
12391  DCHECK(call->arguments()->length() == 0);
12392  HValue* ref =
12393      Add<HConstant>(ExternalReference::debug_is_active_address(isolate()));
12394  HValue* value =
12395      Add<HLoadNamedField>(ref, nullptr, HObjectAccess::ForExternalUInteger8());
12396  return ast_context()->ReturnValue(value);
12397}
12398
12399#undef CHECK_BAILOUT
12400#undef CHECK_ALIVE
12401
12402
12403HEnvironment::HEnvironment(HEnvironment* outer,
12404                           Scope* scope,
12405                           Handle<JSFunction> closure,
12406                           Zone* zone)
12407    : closure_(closure),
12408      values_(0, zone),
12409      frame_type_(JS_FUNCTION),
12410      parameter_count_(0),
12411      specials_count_(1),
12412      local_count_(0),
12413      outer_(outer),
12414      entry_(NULL),
12415      pop_count_(0),
12416      push_count_(0),
12417      ast_id_(BailoutId::None()),
12418      zone_(zone) {
12419  DeclarationScope* declaration_scope = scope->GetDeclarationScope();
12420  Initialize(declaration_scope->num_parameters() + 1,
12421             declaration_scope->num_stack_slots(), 0);
12422}
12423
12424
12425HEnvironment::HEnvironment(Zone* zone, int parameter_count)
12426    : values_(0, zone),
12427      frame_type_(STUB),
12428      parameter_count_(parameter_count),
12429      specials_count_(1),
12430      local_count_(0),
12431      outer_(NULL),
12432      entry_(NULL),
12433      pop_count_(0),
12434      push_count_(0),
12435      ast_id_(BailoutId::None()),
12436      zone_(zone) {
12437  Initialize(parameter_count, 0, 0);
12438}
12439
12440
12441HEnvironment::HEnvironment(const HEnvironment* other, Zone* zone)
12442    : values_(0, zone),
12443      frame_type_(JS_FUNCTION),
12444      parameter_count_(0),
12445      specials_count_(0),
12446      local_count_(0),
12447      outer_(NULL),
12448      entry_(NULL),
12449      pop_count_(0),
12450      push_count_(0),
12451      ast_id_(other->ast_id()),
12452      zone_(zone) {
12453  Initialize(other);
12454}
12455
12456
12457HEnvironment::HEnvironment(HEnvironment* outer,
12458                           Handle<JSFunction> closure,
12459                           FrameType frame_type,
12460                           int arguments,
12461                           Zone* zone)
12462    : closure_(closure),
12463      values_(arguments, zone),
12464      frame_type_(frame_type),
12465      parameter_count_(arguments),
12466      specials_count_(0),
12467      local_count_(0),
12468      outer_(outer),
12469      entry_(NULL),
12470      pop_count_(0),
12471      push_count_(0),
12472      ast_id_(BailoutId::None()),
12473      zone_(zone) {
12474}
12475
12476
12477void HEnvironment::Initialize(int parameter_count,
12478                              int local_count,
12479                              int stack_height) {
12480  parameter_count_ = parameter_count;
12481  local_count_ = local_count;
12482
12483  // Avoid reallocating the temporaries' backing store on the first Push.
12484  int total = parameter_count + specials_count_ + local_count + stack_height;
12485  values_.Initialize(total + 4, zone());
12486  for (int i = 0; i < total; ++i) values_.Add(NULL, zone());
12487}
12488
12489
12490void HEnvironment::Initialize(const HEnvironment* other) {
12491  closure_ = other->closure();
12492  values_.AddAll(other->values_, zone());
12493  assigned_variables_.Union(other->assigned_variables_, zone());
12494  frame_type_ = other->frame_type_;
12495  parameter_count_ = other->parameter_count_;
12496  local_count_ = other->local_count_;
12497  if (other->outer_ != NULL) outer_ = other->outer_->Copy();  // Deep copy.
12498  entry_ = other->entry_;
12499  pop_count_ = other->pop_count_;
12500  push_count_ = other->push_count_;
12501  specials_count_ = other->specials_count_;
12502  ast_id_ = other->ast_id_;
12503}
12504
12505
12506void HEnvironment::AddIncomingEdge(HBasicBlock* block, HEnvironment* other) {
12507  DCHECK(!block->IsLoopHeader());
12508  DCHECK(values_.length() == other->values_.length());
12509
12510  int length = values_.length();
12511  for (int i = 0; i < length; ++i) {
12512    HValue* value = values_[i];
12513    if (value != NULL && value->IsPhi() && value->block() == block) {
12514      // There is already a phi for the i'th value.
12515      HPhi* phi = HPhi::cast(value);
12516      // Assert index is correct and that we haven't missed an incoming edge.
12517      DCHECK(phi->merged_index() == i || !phi->HasMergedIndex());
12518      DCHECK(phi->OperandCount() == block->predecessors()->length());
12519      phi->AddInput(other->values_[i]);
12520    } else if (values_[i] != other->values_[i]) {
12521      // There is a fresh value on the incoming edge, a phi is needed.
12522      DCHECK(values_[i] != NULL && other->values_[i] != NULL);
12523      HPhi* phi = block->AddNewPhi(i);
12524      HValue* old_value = values_[i];
12525      for (int j = 0; j < block->predecessors()->length(); j++) {
12526        phi->AddInput(old_value);
12527      }
12528      phi->AddInput(other->values_[i]);
12529      this->values_[i] = phi;
12530    }
12531  }
12532}
12533
12534
12535void HEnvironment::Bind(int index, HValue* value) {
12536  DCHECK(value != NULL);
12537  assigned_variables_.Add(index, zone());
12538  values_[index] = value;
12539}
12540
12541
12542bool HEnvironment::HasExpressionAt(int index) const {
12543  return index >= parameter_count_ + specials_count_ + local_count_;
12544}
12545
12546
12547bool HEnvironment::ExpressionStackIsEmpty() const {
12548  DCHECK(length() >= first_expression_index());
12549  return length() == first_expression_index();
12550}
12551
12552
12553void HEnvironment::SetExpressionStackAt(int index_from_top, HValue* value) {
12554  int count = index_from_top + 1;
12555  int index = values_.length() - count;
12556  DCHECK(HasExpressionAt(index));
12557  // The push count must include at least the element in question or else
12558  // the new value will not be included in this environment's history.
12559  if (push_count_ < count) {
12560    // This is the same effect as popping then re-pushing 'count' elements.
12561    pop_count_ += (count - push_count_);
12562    push_count_ = count;
12563  }
12564  values_[index] = value;
12565}
12566
12567
12568HValue* HEnvironment::RemoveExpressionStackAt(int index_from_top) {
12569  int count = index_from_top + 1;
12570  int index = values_.length() - count;
12571  DCHECK(HasExpressionAt(index));
12572  // Simulate popping 'count' elements and then
12573  // pushing 'count - 1' elements back.
12574  pop_count_ += Max(count - push_count_, 0);
12575  push_count_ = Max(push_count_ - count, 0) + (count - 1);
12576  return values_.Remove(index);
12577}
12578
12579
12580void HEnvironment::Drop(int count) {
12581  for (int i = 0; i < count; ++i) {
12582    Pop();
12583  }
12584}
12585
12586
12587void HEnvironment::Print() const {
12588  OFStream os(stdout);
12589  os << *this << "\n";
12590}
12591
12592
12593HEnvironment* HEnvironment::Copy() const {
12594  return new(zone()) HEnvironment(this, zone());
12595}
12596
12597
12598HEnvironment* HEnvironment::CopyWithoutHistory() const {
12599  HEnvironment* result = Copy();
12600  result->ClearHistory();
12601  return result;
12602}
12603
12604
12605HEnvironment* HEnvironment::CopyAsLoopHeader(HBasicBlock* loop_header) const {
12606  HEnvironment* new_env = Copy();
12607  for (int i = 0; i < values_.length(); ++i) {
12608    HPhi* phi = loop_header->AddNewPhi(i);
12609    phi->AddInput(values_[i]);
12610    new_env->values_[i] = phi;
12611  }
12612  new_env->ClearHistory();
12613  return new_env;
12614}
12615
12616
12617HEnvironment* HEnvironment::CreateStubEnvironment(HEnvironment* outer,
12618                                                  Handle<JSFunction> target,
12619                                                  FrameType frame_type,
12620                                                  int arguments) const {
12621  HEnvironment* new_env =
12622      new(zone()) HEnvironment(outer, target, frame_type,
12623                               arguments + 1, zone());
12624  for (int i = 0; i <= arguments; ++i) {  // Include receiver.
12625    new_env->Push(ExpressionStackAt(arguments - i));
12626  }
12627  new_env->ClearHistory();
12628  return new_env;
12629}
12630
12631void HEnvironment::MarkAsTailCaller() {
12632  DCHECK_EQ(JS_FUNCTION, frame_type());
12633  frame_type_ = TAIL_CALLER_FUNCTION;
12634}
12635
12636void HEnvironment::ClearTailCallerMark() {
12637  DCHECK_EQ(TAIL_CALLER_FUNCTION, frame_type());
12638  frame_type_ = JS_FUNCTION;
12639}
12640
12641HEnvironment* HEnvironment::CopyForInlining(
12642    Handle<JSFunction> target, int arguments, FunctionLiteral* function,
12643    HConstant* undefined, InliningKind inlining_kind,
12644    TailCallMode syntactic_tail_call_mode) const {
12645  DCHECK_EQ(JS_FUNCTION, frame_type());
12646
12647  // Outer environment is a copy of this one without the arguments.
12648  int arity = function->scope()->num_parameters();
12649
12650  HEnvironment* outer = Copy();
12651  outer->Drop(arguments + 1);  // Including receiver.
12652  outer->ClearHistory();
12653
12654  if (syntactic_tail_call_mode == TailCallMode::kAllow) {
12655    DCHECK_EQ(NORMAL_RETURN, inlining_kind);
12656    outer->MarkAsTailCaller();
12657  }
12658
12659  if (inlining_kind == CONSTRUCT_CALL_RETURN) {
12660    // Create artificial constructor stub environment.  The receiver should
12661    // actually be the constructor function, but we pass the newly allocated
12662    // object instead, DoComputeConstructStubFrame() relies on that.
12663    outer = CreateStubEnvironment(outer, target, JS_CONSTRUCT, arguments);
12664  } else if (inlining_kind == GETTER_CALL_RETURN) {
12665    // We need an additional StackFrame::INTERNAL frame for restoring the
12666    // correct context.
12667    outer = CreateStubEnvironment(outer, target, JS_GETTER, arguments);
12668  } else if (inlining_kind == SETTER_CALL_RETURN) {
12669    // We need an additional StackFrame::INTERNAL frame for temporarily saving
12670    // the argument of the setter, see StoreStubCompiler::CompileStoreViaSetter.
12671    outer = CreateStubEnvironment(outer, target, JS_SETTER, arguments);
12672  }
12673
12674  if (arity != arguments) {
12675    // Create artificial arguments adaptation environment.
12676    outer = CreateStubEnvironment(outer, target, ARGUMENTS_ADAPTOR, arguments);
12677  }
12678
12679  HEnvironment* inner =
12680      new(zone()) HEnvironment(outer, function->scope(), target, zone());
12681  // Get the argument values from the original environment.
12682  for (int i = 0; i <= arity; ++i) {  // Include receiver.
12683    HValue* push = (i <= arguments) ?
12684        ExpressionStackAt(arguments - i) : undefined;
12685    inner->SetValueAt(i, push);
12686  }
12687  inner->SetValueAt(arity + 1, context());
12688  for (int i = arity + 2; i < inner->length(); ++i) {
12689    inner->SetValueAt(i, undefined);
12690  }
12691
12692  inner->set_ast_id(BailoutId::FunctionEntry());
12693  return inner;
12694}
12695
12696
12697std::ostream& operator<<(std::ostream& os, const HEnvironment& env) {
12698  for (int i = 0; i < env.length(); i++) {
12699    if (i == 0) os << "parameters\n";
12700    if (i == env.parameter_count()) os << "specials\n";
12701    if (i == env.parameter_count() + env.specials_count()) os << "locals\n";
12702    if (i == env.parameter_count() + env.specials_count() + env.local_count()) {
12703      os << "expressions\n";
12704    }
12705    HValue* val = env.values()->at(i);
12706    os << i << ": ";
12707    if (val != NULL) {
12708      os << val;
12709    } else {
12710      os << "NULL";
12711    }
12712    os << "\n";
12713  }
12714  return os << "\n";
12715}
12716
12717
12718void HTracer::TraceCompilation(CompilationInfo* info) {
12719  Tag tag(this, "compilation");
12720  std::string name;
12721  if (info->parse_info()) {
12722    Object* source_name = info->script()->name();
12723    if (source_name->IsString()) {
12724      String* str = String::cast(source_name);
12725      if (str->length() > 0) {
12726        name.append(str->ToCString().get());
12727        name.append(":");
12728      }
12729    }
12730  }
12731  std::unique_ptr<char[]> method_name = info->GetDebugName();
12732  name.append(method_name.get());
12733  if (info->IsOptimizing()) {
12734    PrintStringProperty("name", name.c_str());
12735    PrintIndent();
12736    trace_.Add("method \"%s:%d\"\n", method_name.get(),
12737               info->optimization_id());
12738  } else {
12739    PrintStringProperty("name", name.c_str());
12740    PrintStringProperty("method", "stub");
12741  }
12742  PrintLongProperty("date",
12743                    static_cast<int64_t>(base::OS::TimeCurrentMillis()));
12744}
12745
12746
12747void HTracer::TraceLithium(const char* name, LChunk* chunk) {
12748  DCHECK(!chunk->isolate()->concurrent_recompilation_enabled());
12749  AllowHandleDereference allow_deref;
12750  AllowDeferredHandleDereference allow_deferred_deref;
12751  Trace(name, chunk->graph(), chunk);
12752}
12753
12754
12755void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
12756  DCHECK(!graph->isolate()->concurrent_recompilation_enabled());
12757  AllowHandleDereference allow_deref;
12758  AllowDeferredHandleDereference allow_deferred_deref;
12759  Trace(name, graph, NULL);
12760}
12761
12762
12763void HTracer::Trace(const char* name, HGraph* graph, LChunk* chunk) {
12764  Tag tag(this, "cfg");
12765  PrintStringProperty("name", name);
12766  const ZoneList<HBasicBlock*>* blocks = graph->blocks();
12767  for (int i = 0; i < blocks->length(); i++) {
12768    HBasicBlock* current = blocks->at(i);
12769    Tag block_tag(this, "block");
12770    PrintBlockProperty("name", current->block_id());
12771    PrintIntProperty("from_bci", -1);
12772    PrintIntProperty("to_bci", -1);
12773
12774    if (!current->predecessors()->is_empty()) {
12775      PrintIndent();
12776      trace_.Add("predecessors");
12777      for (int j = 0; j < current->predecessors()->length(); ++j) {
12778        trace_.Add(" \"B%d\"", current->predecessors()->at(j)->block_id());
12779      }
12780      trace_.Add("\n");
12781    } else {
12782      PrintEmptyProperty("predecessors");
12783    }
12784
12785    if (current->end()->SuccessorCount() == 0) {
12786      PrintEmptyProperty("successors");
12787    } else  {
12788      PrintIndent();
12789      trace_.Add("successors");
12790      for (HSuccessorIterator it(current->end()); !it.Done(); it.Advance()) {
12791        trace_.Add(" \"B%d\"", it.Current()->block_id());
12792      }
12793      trace_.Add("\n");
12794    }
12795
12796    PrintEmptyProperty("xhandlers");
12797
12798    {
12799      PrintIndent();
12800      trace_.Add("flags");
12801      if (current->IsLoopSuccessorDominator()) {
12802        trace_.Add(" \"dom-loop-succ\"");
12803      }
12804      if (current->IsUnreachable()) {
12805        trace_.Add(" \"dead\"");
12806      }
12807      if (current->is_osr_entry()) {
12808        trace_.Add(" \"osr\"");
12809      }
12810      trace_.Add("\n");
12811    }
12812
12813    if (current->dominator() != NULL) {
12814      PrintBlockProperty("dominator", current->dominator()->block_id());
12815    }
12816
12817    PrintIntProperty("loop_depth", current->LoopNestingDepth());
12818
12819    if (chunk != NULL) {
12820      int first_index = current->first_instruction_index();
12821      int last_index = current->last_instruction_index();
12822      PrintIntProperty(
12823          "first_lir_id",
12824          LifetimePosition::FromInstructionIndex(first_index).Value());
12825      PrintIntProperty(
12826          "last_lir_id",
12827          LifetimePosition::FromInstructionIndex(last_index).Value());
12828    }
12829
12830    {
12831      Tag states_tag(this, "states");
12832      Tag locals_tag(this, "locals");
12833      int total = current->phis()->length();
12834      PrintIntProperty("size", current->phis()->length());
12835      PrintStringProperty("method", "None");
12836      for (int j = 0; j < total; ++j) {
12837        HPhi* phi = current->phis()->at(j);
12838        PrintIndent();
12839        std::ostringstream os;
12840        os << phi->merged_index() << " " << NameOf(phi) << " " << *phi << "\n";
12841        trace_.Add(os.str().c_str());
12842      }
12843    }
12844
12845    {
12846      Tag HIR_tag(this, "HIR");
12847      for (HInstructionIterator it(current); !it.Done(); it.Advance()) {
12848        HInstruction* instruction = it.Current();
12849        int uses = instruction->UseCount();
12850        PrintIndent();
12851        std::ostringstream os;
12852        os << "0 " << uses << " " << NameOf(instruction) << " " << *instruction;
12853        if (instruction->has_position()) {
12854          const SourcePosition pos = instruction->position();
12855          os << " pos:";
12856          if (pos.isInlined()) os << "inlining(" << pos.InliningId() << "),";
12857          os << pos.ScriptOffset();
12858        }
12859        os << " <|@\n";
12860        trace_.Add(os.str().c_str());
12861      }
12862    }
12863
12864
12865    if (chunk != NULL) {
12866      Tag LIR_tag(this, "LIR");
12867      int first_index = current->first_instruction_index();
12868      int last_index = current->last_instruction_index();
12869      if (first_index != -1 && last_index != -1) {
12870        const ZoneList<LInstruction*>* instructions = chunk->instructions();
12871        for (int i = first_index; i <= last_index; ++i) {
12872          LInstruction* linstr = instructions->at(i);
12873          if (linstr != NULL) {
12874            PrintIndent();
12875            trace_.Add("%d ",
12876                       LifetimePosition::FromInstructionIndex(i).Value());
12877            linstr->PrintTo(&trace_);
12878            std::ostringstream os;
12879            os << " [hir:" << NameOf(linstr->hydrogen_value()) << "] <|@\n";
12880            trace_.Add(os.str().c_str());
12881          }
12882        }
12883      }
12884    }
12885  }
12886}
12887
12888
12889void HTracer::TraceLiveRanges(const char* name, LAllocator* allocator) {
12890  Tag tag(this, "intervals");
12891  PrintStringProperty("name", name);
12892
12893  const Vector<LiveRange*>* fixed_d = allocator->fixed_double_live_ranges();
12894  for (int i = 0; i < fixed_d->length(); ++i) {
12895    TraceLiveRange(fixed_d->at(i), "fixed", allocator->zone());
12896  }
12897
12898  const Vector<LiveRange*>* fixed = allocator->fixed_live_ranges();
12899  for (int i = 0; i < fixed->length(); ++i) {
12900    TraceLiveRange(fixed->at(i), "fixed", allocator->zone());
12901  }
12902
12903  const ZoneList<LiveRange*>* live_ranges = allocator->live_ranges();
12904  for (int i = 0; i < live_ranges->length(); ++i) {
12905    TraceLiveRange(live_ranges->at(i), "object", allocator->zone());
12906  }
12907}
12908
12909
12910void HTracer::TraceLiveRange(LiveRange* range, const char* type,
12911                             Zone* zone) {
12912  if (range != NULL && !range->IsEmpty()) {
12913    PrintIndent();
12914    trace_.Add("%d %s", range->id(), type);
12915    if (range->HasRegisterAssigned()) {
12916      LOperand* op = range->CreateAssignedOperand(zone);
12917      int assigned_reg = op->index();
12918      if (op->IsDoubleRegister()) {
12919        trace_.Add(" \"%s\"",
12920                   GetRegConfig()->GetDoubleRegisterName(assigned_reg));
12921      } else {
12922        DCHECK(op->IsRegister());
12923        trace_.Add(" \"%s\"",
12924                   GetRegConfig()->GetGeneralRegisterName(assigned_reg));
12925      }
12926    } else if (range->IsSpilled()) {
12927      LOperand* op = range->TopLevel()->GetSpillOperand();
12928      if (op->IsDoubleStackSlot()) {
12929        trace_.Add(" \"double_stack:%d\"", op->index());
12930      } else {
12931        DCHECK(op->IsStackSlot());
12932        trace_.Add(" \"stack:%d\"", op->index());
12933      }
12934    }
12935    int parent_index = -1;
12936    if (range->IsChild()) {
12937      parent_index = range->parent()->id();
12938    } else {
12939      parent_index = range->id();
12940    }
12941    LOperand* op = range->FirstHint();
12942    int hint_index = -1;
12943    if (op != NULL && op->IsUnallocated()) {
12944      hint_index = LUnallocated::cast(op)->virtual_register();
12945    }
12946    trace_.Add(" %d %d", parent_index, hint_index);
12947    UseInterval* cur_interval = range->first_interval();
12948    while (cur_interval != NULL && range->Covers(cur_interval->start())) {
12949      trace_.Add(" [%d, %d[",
12950                 cur_interval->start().Value(),
12951                 cur_interval->end().Value());
12952      cur_interval = cur_interval->next();
12953    }
12954
12955    UsePosition* current_pos = range->first_pos();
12956    while (current_pos != NULL) {
12957      if (current_pos->RegisterIsBeneficial() || FLAG_trace_all_uses) {
12958        trace_.Add(" %d M", current_pos->pos().Value());
12959      }
12960      current_pos = current_pos->next();
12961    }
12962
12963    trace_.Add(" \"\"\n");
12964  }
12965}
12966
12967
12968void HTracer::FlushToFile() {
12969  AppendChars(filename_.start(), trace_.ToCString().get(), trace_.length(),
12970              false);
12971  trace_.Reset();
12972}
12973
12974
12975void HStatistics::Initialize(CompilationInfo* info) {
12976  if (!info->has_shared_info()) return;
12977  source_size_ += info->shared_info()->SourceSize();
12978}
12979
12980
12981void HStatistics::Print() {
12982  PrintF(
12983      "\n"
12984      "----------------------------------------"
12985      "----------------------------------------\n"
12986      "--- Hydrogen timing results:\n"
12987      "----------------------------------------"
12988      "----------------------------------------\n");
12989  base::TimeDelta sum;
12990  for (int i = 0; i < times_.length(); ++i) {
12991    sum += times_[i];
12992  }
12993
12994  for (int i = 0; i < names_.length(); ++i) {
12995    PrintF("%33s", names_[i]);
12996    double ms = times_[i].InMillisecondsF();
12997    double percent = times_[i].PercentOf(sum);
12998    PrintF(" %8.3f ms / %4.1f %% ", ms, percent);
12999
13000    size_t size = sizes_[i];
13001    double size_percent = static_cast<double>(size) * 100 / total_size_;
13002    PrintF(" %9zu bytes / %4.1f %%\n", size, size_percent);
13003  }
13004
13005  PrintF(
13006      "----------------------------------------"
13007      "----------------------------------------\n");
13008  base::TimeDelta total = create_graph_ + optimize_graph_ + generate_code_;
13009  PrintF("%33s %8.3f ms / %4.1f %% \n", "Create graph",
13010         create_graph_.InMillisecondsF(), create_graph_.PercentOf(total));
13011  PrintF("%33s %8.3f ms / %4.1f %% \n", "Optimize graph",
13012         optimize_graph_.InMillisecondsF(), optimize_graph_.PercentOf(total));
13013  PrintF("%33s %8.3f ms / %4.1f %% \n", "Generate and install code",
13014         generate_code_.InMillisecondsF(), generate_code_.PercentOf(total));
13015  PrintF(
13016      "----------------------------------------"
13017      "----------------------------------------\n");
13018  PrintF("%33s %8.3f ms           %9zu bytes\n", "Total",
13019         total.InMillisecondsF(), total_size_);
13020  PrintF("%33s     (%.1f times slower than full code gen)\n", "",
13021         total.TimesOf(full_code_gen_));
13022
13023  double source_size_in_kb = static_cast<double>(source_size_) / 1024;
13024  double normalized_time =  source_size_in_kb > 0
13025      ? total.InMillisecondsF() / source_size_in_kb
13026      : 0;
13027  double normalized_size_in_kb =
13028      source_size_in_kb > 0
13029          ? static_cast<double>(total_size_) / 1024 / source_size_in_kb
13030          : 0;
13031  PrintF("%33s %8.3f ms           %7.3f kB allocated\n",
13032         "Average per kB source", normalized_time, normalized_size_in_kb);
13033}
13034
13035
13036void HStatistics::SaveTiming(const char* name, base::TimeDelta time,
13037                             size_t size) {
13038  total_size_ += size;
13039  for (int i = 0; i < names_.length(); ++i) {
13040    if (strcmp(names_[i], name) == 0) {
13041      times_[i] += time;
13042      sizes_[i] += size;
13043      return;
13044    }
13045  }
13046  names_.Add(name);
13047  times_.Add(time);
13048  sizes_.Add(size);
13049}
13050
13051
13052HPhase::~HPhase() {
13053  if (ShouldProduceTraceOutput()) {
13054    isolate()->GetHTracer()->TraceHydrogen(name(), graph_);
13055  }
13056
13057#ifdef DEBUG
13058  graph_->Verify(false);  // No full verify.
13059#endif
13060}
13061
13062}  // namespace internal
13063}  // namespace v8
13064