// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/deoptimizer.h"

#include <memory>

#include "src/accessors.h"
#include "src/ast/prettyprinter.h"
#include "src/codegen.h"
#include "src/disasm.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"
#include "src/interpreter/interpreter.h"
#include "src/macro-assembler.h"
#include "src/tracing/trace-event.h"
#include "src/v8.h"


namespace v8 {
namespace internal {

static MemoryChunk* AllocateCodeChunk(MemoryAllocator* allocator) {
  return allocator->AllocateChunk(Deoptimizer::GetMaxDeoptTableSize(),
                                  MemoryAllocator::GetCommitPageSize(),
                                  EXECUTABLE, NULL);
}

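// Pre-allocate one executable memory chunk per bailout type. Each chunk holds
// the table of deoptimization entries generated for that bailout type.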
DeoptimizerData::DeoptimizerData(MemoryAllocator* allocator)
    : allocator_(allocator),
      current_(NULL) {
  for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
    deopt_entry_code_entries_[i] = -1;
    deopt_entry_code_[i] = AllocateCodeChunk(allocator);
  }
}


DeoptimizerData::~DeoptimizerData() {
  for (int i = 0; i <= Deoptimizer::kLastBailoutType; ++i) {
    allocator_->Free<MemoryAllocator::kFull>(deopt_entry_code_[i]);
    deopt_entry_code_[i] = NULL;
  }
}

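// Walks the deoptimized code list of the function's native context and
// returns the optimized code object containing |addr|, or NULL if no such
// code object is found.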
Code* Deoptimizer::FindDeoptimizingCode(Address addr) {
  if (function_->IsHeapObject()) {
    // Search all deoptimizing code in the native context of the function.
    Isolate* isolate = function_->GetIsolate();
    Context* native_context = function_->context()->native_context();
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined(isolate)) {
      Code* code = Code::cast(element);
      CHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      if (code->contains(addr)) return code;
      element = code->next_code_link();
    }
  }
  return NULL;
}


// We rely on this function not causing a GC.  It is called from generated code
// without having a real stack frame in place.
Deoptimizer* Deoptimizer::New(JSFunction* function,
                              BailoutType type,
                              unsigned bailout_id,
                              Address from,
                              int fp_to_sp_delta,
                              Isolate* isolate) {
  Deoptimizer* deoptimizer = new Deoptimizer(isolate, function, type,
                                             bailout_id, from, fp_to_sp_delta);
  CHECK(isolate->deoptimizer_data()->current_ == NULL);
  isolate->deoptimizer_data()->current_ = deoptimizer;
  return deoptimizer;
}


// No larger than 2K on all platforms
static const int kDeoptTableMaxEpilogueCodeSize = 2 * KB;

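// The maximum deopt table size is the space for all entries plus the table
// epilogue, rounded up to a whole number of commit pages.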
size_t Deoptimizer::GetMaxDeoptTableSize() {
  int entries_size =
      Deoptimizer::kMaxNumberOfEntries * Deoptimizer::table_entry_size_;
  int commit_page_size = static_cast<int>(MemoryAllocator::GetCommitPageSize());
  int page_count = ((kDeoptTableMaxEpilogueCodeSize + entries_size - 1) /
                    commit_page_size) + 1;
  return static_cast<size_t>(commit_page_size * page_count);
}

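// Returns the Deoptimizer previously installed by New() and clears it from
// the isolate. The frame descriptions are freed before the deoptimizer is
// handed back to the caller.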
Deoptimizer* Deoptimizer::Grab(Isolate* isolate) {
  Deoptimizer* result = isolate->deoptimizer_data()->current_;
  CHECK_NOT_NULL(result);
  result->DeleteFrameDescriptions();
  isolate->deoptimizer_data()->current_ = NULL;
  return result;
}

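// Builds a DeoptimizedFrameInfo for the debugger by translating the optimized
// frame and selecting the jsframe_index-th JavaScript (full-codegen or
// interpreted) frame from the translation.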
DeoptimizedFrameInfo* Deoptimizer::DebuggerInspectableFrame(
    JavaScriptFrame* frame,
    int jsframe_index,
    Isolate* isolate) {
  CHECK(frame->is_optimized());

  TranslatedState translated_values(frame);
  translated_values.Prepare(false, frame->fp());

  TranslatedState::iterator frame_it = translated_values.end();
  int counter = jsframe_index;
  for (auto it = translated_values.begin(); it != translated_values.end();
       it++) {
    if (it->kind() == TranslatedFrame::kFunction ||
        it->kind() == TranslatedFrame::kInterpretedFunction) {
      if (counter == 0) {
        frame_it = it;
        break;
      }
      counter--;
    }
  }
  CHECK(frame_it != translated_values.end());

  DeoptimizedFrameInfo* info =
      new DeoptimizedFrameInfo(&translated_values, frame_it, isolate);

  return info;
}

void Deoptimizer::GenerateDeoptimizationEntries(MacroAssembler* masm,
                                                int count,
                                                BailoutType type) {
  TableEntryGenerator generator(masm, type, count);
  generator.Generate();
}

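// Calls the visitor for every function on the context's optimized functions
// list, unlinking any function that no longer refers to optimized code.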
void Deoptimizer::VisitAllOptimizedFunctionsForContext(
    Context* context, OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  CHECK(context->IsNativeContext());

  visitor->EnterContext(context);

  // Visit the list of optimized functions, removing elements that
  // no longer refer to optimized code.
  JSFunction* prev = NULL;
  Object* element = context->OptimizedFunctionsListHead();
  Isolate* isolate = context->GetIsolate();
  while (!element->IsUndefined(isolate)) {
    JSFunction* function = JSFunction::cast(element);
    Object* next = function->next_function_link();
    if (function->code()->kind() != Code::OPTIMIZED_FUNCTION ||
        (visitor->VisitFunction(function),
         function->code()->kind() != Code::OPTIMIZED_FUNCTION)) {
      // The function no longer refers to optimized code, or the visitor
      // changed the code it refers to so that it is no longer optimized code.
      // Remove the function from this list.
      if (prev != NULL) {
        prev->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
      } else {
        context->SetOptimizedFunctionsListHead(next);
      }
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // Set the next function link to undefined to indicate it is no longer
      // in the optimized functions list.
      function->set_next_function_link(context->GetHeap()->undefined_value(),
                                       SKIP_WRITE_BARRIER);
    } else {
      // The visitor should not alter the link directly.
      CHECK_EQ(function->next_function_link(), next);
      // preserve this element.
      prev = function;
    }
    element = next;
  }

  visitor->LeaveContext(context);
}


void Deoptimizer::VisitAllOptimizedFunctions(
    Isolate* isolate,
    OptimizedFunctionVisitor* visitor) {
  DisallowHeapAllocation no_allocation;

  // Run through the list of all native contexts.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    VisitAllOptimizedFunctionsForContext(Context::cast(context), visitor);
    context = Context::cast(context)->next_context_link();
  }
}

// Unlink functions referring to code marked for deoptimization, then move
// marked code from the optimized code list to the deoptimized code list,
// and patch code for lazy deopt.
void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
  DisallowHeapAllocation no_allocation;

  // A "closure" that unlinks optimized code that is going to be
  // deoptimized from the functions that refer to it.
  class SelectedCodeUnlinker: public OptimizedFunctionVisitor {
   public:
    virtual void EnterContext(Context* context) { }  // Don't care.
    virtual void LeaveContext(Context* context) { }  // Don't care.
    virtual void VisitFunction(JSFunction* function) {
      Code* code = function->code();
      if (!code->marked_for_deoptimization()) return;

      // Unlink this function and evict from optimized code map.
      SharedFunctionInfo* shared = function->shared();
      function->set_code(shared->code());

      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(code->GetHeap()->isolate()->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer unlinked: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
    }
  };

  // Unlink all functions that refer to marked code.
  SelectedCodeUnlinker unlinker;
  VisitAllOptimizedFunctionsForContext(context, &unlinker);

  Isolate* isolate = context->GetHeap()->isolate();
#ifdef DEBUG
  Code* topmost_optimized_code = NULL;
  bool safe_to_deopt_topmost_optimized_code = false;
  // Make sure all activations of optimized code can deopt at their current PC.
  // The topmost optimized code has special handling because it cannot be
  // deoptimized due to weak object dependency.
  for (StackFrameIterator it(isolate, isolate->thread_local_top());
       !it.done(); it.Advance()) {
    StackFrame::Type type = it.frame()->type();
    if (type == StackFrame::OPTIMIZED) {
      Code* code = it.frame()->LookupCode();
      JSFunction* function =
          static_cast<OptimizedFrame*>(it.frame())->function();
      if (FLAG_trace_deopt) {
        CodeTracer::Scope scope(isolate->GetCodeTracer());
        PrintF(scope.file(), "[deoptimizer found activation of function: ");
        function->PrintName(scope.file());
        PrintF(scope.file(),
               " / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
      }
      SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
      int deopt_index = safepoint.deoptimization_index();
      // Turbofan deopt is checked when we are patching addresses on stack.
      bool turbofanned = code->is_turbofanned() &&
                         function->shared()->asm_function() &&
                         !FLAG_turbo_asm_deoptimization;
      bool safe_to_deopt =
          deopt_index != Safepoint::kNoDeoptimizationIndex || turbofanned;
      bool builtin = code->kind() == Code::BUILTIN;
      CHECK(topmost_optimized_code == NULL || safe_to_deopt || turbofanned ||
            builtin);
      if (topmost_optimized_code == NULL) {
        topmost_optimized_code = code;
        safe_to_deopt_topmost_optimized_code = safe_to_deopt;
      }
    }
  }
#endif

  // Move marked code from the optimized code list to the deoptimized
  // code list, collecting them into a ZoneList.
  Zone zone(isolate->allocator(), ZONE_NAME);
  ZoneList<Code*> codes(10, &zone);

  // Walk over all optimized code objects in this native context.
  Code* prev = NULL;
  Object* element = context->OptimizedCodeListHead();
  while (!element->IsUndefined(isolate)) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    Object* next = code->next_code_link();

    if (code->marked_for_deoptimization()) {
      // Put the code into the list for later patching.
      codes.Add(code, &zone);

      if (prev != NULL) {
        // Skip this code in the optimized code list.
        prev->set_next_code_link(next);
      } else {
        // There was no previous node, the next node is the new head.
        context->SetOptimizedCodeListHead(next);
      }

      // Move the code to the _deoptimized_ code list.
      code->set_next_code_link(context->DeoptimizedCodeListHead());
      context->SetDeoptimizedCodeListHead(code);
    } else {
      // Not marked; preserve this element.
      prev = code;
    }
    element = next;
  }

  // We need a handle scope only because of the macro assembler,
  // which is used in code patching in EnsureCodeForDeoptimizationEntry.
  HandleScope scope(isolate);

  // Now patch all the codes for deoptimization.
  for (int i = 0; i < codes.length(); i++) {
#ifdef DEBUG
    if (codes[i] == topmost_optimized_code) {
      DCHECK(safe_to_deopt_topmost_optimized_code);
    }
#endif
    // It is finally time to die, code object.

    // Remove the code from optimized code map.
    DeoptimizationInputData* deopt_data =
        DeoptimizationInputData::cast(codes[i]->deoptimization_data());
    SharedFunctionInfo* shared =
        SharedFunctionInfo::cast(deopt_data->SharedFunctionInfo());
    shared->EvictFromOptimizedCodeMap(codes[i], "deoptimized code");

    // Do platform-specific patching to force any activations to lazy deopt.
    PatchCodeForDeoptimization(isolate, codes[i]);

    // We might be in the middle of incremental marking with compaction.
    // Tell collector to treat this code object in a special way and
    // ignore all slots that might have been recorded on it.
    isolate->heap()->mark_compact_collector()->InvalidateCode(codes[i]);
  }
}


void Deoptimizer::DeoptimizeAll(Isolate* isolate) {
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::DeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize all code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, mark all code, then deoptimize.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    MarkAllCodeForContext(native_context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->next_context_link();
  }
}


void Deoptimizer::DeoptimizeMarkedCode(Isolate* isolate) {
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::DeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  if (FLAG_trace_deopt) {
    CodeTracer::Scope scope(isolate->GetCodeTracer());
    PrintF(scope.file(), "[deoptimize marked code in all contexts]\n");
  }
  DisallowHeapAllocation no_allocation;
  // For all contexts, deoptimize code already marked.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    DeoptimizeMarkedCodeForContext(native_context);
    context = native_context->next_context_link();
  }
}

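// Marks all code on the context's optimized code list for deoptimization.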
void Deoptimizer::MarkAllCodeForContext(Context* context) {
  Object* element = context->OptimizedCodeListHead();
  Isolate* isolate = context->GetIsolate();
  while (!element->IsUndefined(isolate)) {
    Code* code = Code::cast(element);
    CHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
    code->set_marked_for_deoptimization(true);
    element = code->next_code_link();
  }
}


void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  Isolate* isolate = function->GetIsolate();
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::DeoptimizeCode);
  TimerEventScope<TimerEventDeoptimizeCode> timer(isolate);
  TRACE_EVENT0("v8", "V8.DeoptimizeCode");
  Code* code = function->code();
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    // Mark the code for deoptimization and unlink any functions that also
    // refer to that code. The code cannot be shared across native contexts,
    // so we only need to search one.
    code->set_marked_for_deoptimization(true);
    DeoptimizeMarkedCodeForContext(function->context()->native_context());
  }
}


void Deoptimizer::ComputeOutputFrames(Deoptimizer* deoptimizer) {
  deoptimizer->DoComputeOutputFrames();
}

bool Deoptimizer::TraceEnabledFor(StackFrame::Type frame_type) {
  return (frame_type == StackFrame::STUB) ? FLAG_trace_stub_failures
                                          : FLAG_trace_deopt;
}


const char* Deoptimizer::MessageFor(BailoutType type) {
  switch (type) {
    case EAGER: return "eager";
    case SOFT: return "soft";
    case LAZY: return "lazy";
  }
  FATAL("Unsupported deopt type");
  return NULL;
}

Deoptimizer::Deoptimizer(Isolate* isolate, JSFunction* function,
                         BailoutType type, unsigned bailout_id, Address from,
                         int fp_to_sp_delta)
    : isolate_(isolate),
      function_(function),
      bailout_id_(bailout_id),
      bailout_type_(type),
      from_(from),
      fp_to_sp_delta_(fp_to_sp_delta),
      deoptimizing_throw_(false),
      catch_handler_data_(-1),
      catch_handler_pc_offset_(-1),
      input_(nullptr),
      output_count_(0),
      jsframe_count_(0),
      output_(nullptr),
      caller_frame_top_(0),
      caller_fp_(0),
      caller_pc_(0),
      caller_constant_pool_(0),
      input_frame_context_(0),
      stack_fp_(0),
      trace_scope_(nullptr) {
  if (isolate->deoptimizer_lazy_throw()) {
    isolate->set_deoptimizer_lazy_throw(false);
    deoptimizing_throw_ = true;
  }

  // For COMPILED_STUBs called from builtins, the function pointer is a SMI
  // indicating an internal frame.
  if (function->IsSmi()) {
    function = nullptr;
  }
  DCHECK(from != nullptr);
  if (function != nullptr && function->IsOptimized()) {
    function->shared()->increment_deopt_count();
    if (bailout_type_ == Deoptimizer::SOFT) {
      isolate->counters()->soft_deopts_executed()->Increment();
      // Soft deopts shouldn't count against the overall re-optimization count
      // that can eventually lead to disabling optimization for a function.
      int opt_count = function->shared()->opt_count();
      if (opt_count > 0) opt_count--;
      function->shared()->set_opt_count(opt_count);
    }
  }
  compiled_code_ = FindOptimizedCode(function);
#if DEBUG
  DCHECK(compiled_code_ != NULL);
  if (type == EAGER || type == SOFT || type == LAZY) {
    DCHECK(compiled_code_->kind() != Code::FUNCTION);
  }
#endif

  StackFrame::Type frame_type = function == NULL
      ? StackFrame::STUB
      : StackFrame::JAVA_SCRIPT;
  trace_scope_ = TraceEnabledFor(frame_type)
                     ? new CodeTracer::Scope(isolate->GetCodeTracer())
                     : NULL;
#ifdef DEBUG
  CHECK(AllowHeapAllocation::IsAllowed());
  disallow_heap_allocation_ = new DisallowHeapAllocation();
#endif  // DEBUG
  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
    PROFILE(isolate_, CodeDeoptEvent(compiled_code_, from_, fp_to_sp_delta_));
  }
  unsigned size = ComputeInputFrameSize();
  int parameter_count =
      function == nullptr
          ? 0
          : (function->shared()->internal_formal_parameter_count() + 1);
  input_ = new (size) FrameDescription(size, parameter_count);
  input_->SetFrameType(frame_type);
}

Code* Deoptimizer::FindOptimizedCode(JSFunction* function) {
  Code* compiled_code = FindDeoptimizingCode(from_);
  return (compiled_code == NULL)
             ? static_cast<Code*>(isolate_->FindCodeObject(from_))
             : compiled_code;
}


void Deoptimizer::PrintFunctionName() {
  if (function_ != nullptr && function_->IsJSFunction()) {
    function_->ShortPrint(trace_scope_->file());
  } else {
    PrintF(trace_scope_->file(),
           "%s", Code::Kind2String(compiled_code_->kind()));
  }
}


Deoptimizer::~Deoptimizer() {
  DCHECK(input_ == NULL && output_ == NULL);
  DCHECK(disallow_heap_allocation_ == NULL);
  delete trace_scope_;
}


void Deoptimizer::DeleteFrameDescriptions() {
  delete input_;
  for (int i = 0; i < output_count_; ++i) {
    if (output_[i] != input_) delete output_[i];
  }
  delete[] output_;
  input_ = NULL;
  output_ = NULL;
#ifdef DEBUG
  CHECK(!AllowHeapAllocation::IsAllowed());
  CHECK(disallow_heap_allocation_ != NULL);
  delete disallow_heap_allocation_;
  disallow_heap_allocation_ = NULL;
#endif  // DEBUG
}


Address Deoptimizer::GetDeoptimizationEntry(Isolate* isolate,
                                            int id,
                                            BailoutType type,
                                            GetEntryMode mode) {
  CHECK_GE(id, 0);
  if (id >= kMaxNumberOfEntries) return NULL;
  if (mode == ENSURE_ENTRY_CODE) {
    EnsureCodeForDeoptimizationEntry(isolate, type, id);
  } else {
    CHECK_EQ(mode, CALCULATE_ENTRY_ADDRESS);
  }
  DeoptimizerData* data = isolate->deoptimizer_data();
  CHECK_LE(type, kLastBailoutType);
  MemoryChunk* base = data->deopt_entry_code_[type];
  return base->area_start() + (id * table_entry_size_);
}

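// Inverse of GetDeoptimizationEntry: maps an address inside the deopt entry
// table back to its entry id, or returns kNotDeoptimizationEntry if the
// address does not belong to the table.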
int Deoptimizer::GetDeoptimizationId(Isolate* isolate,
                                     Address addr,
                                     BailoutType type) {
  DeoptimizerData* data = isolate->deoptimizer_data();
  MemoryChunk* base = data->deopt_entry_code_[type];
  Address start = base->area_start();
  if (addr < start ||
      addr >= start + (kMaxNumberOfEntries * table_entry_size_)) {
    return kNotDeoptimizationEntry;
  }
  DCHECK_EQ(0,
            static_cast<int>(addr - start) % table_entry_size_);
  return static_cast<int>(addr - start) / table_entry_size_;
}


int Deoptimizer::GetOutputInfo(DeoptimizationOutputData* data,
                               BailoutId id,
                               SharedFunctionInfo* shared) {
  // TODO(kasperl): For now, we do a simple linear search for the PC
  // offset associated with the given node id. This should probably be
  // changed to a binary search.
  int length = data->DeoptPoints();
  for (int i = 0; i < length; i++) {
    if (data->AstId(i) == id) {
      return data->PcAndState(i)->value();
    }
  }
  OFStream os(stderr);
  os << "[couldn't find pc offset for node=" << id.ToInt() << "]\n"
     << "[method: " << shared->DebugName()->ToCString().get() << "]\n"
     << "[source:\n" << SourceCodeOf(shared) << "\n]" << std::endl;

  shared->GetHeap()->isolate()->PushStackTraceAndDie(0xfefefefe, data, shared,
                                                     0xfefefeff);
  FATAL("unable to find pc offset during deoptimization");
  return -1;
}


int Deoptimizer::GetDeoptimizedCodeCount(Isolate* isolate) {
  int length = 0;
  // Count all entries in the deoptimizing code list of every context.
  Object* context = isolate->heap()->native_contexts_list();
  while (!context->IsUndefined(isolate)) {
    Context* native_context = Context::cast(context);
    Object* element = native_context->DeoptimizedCodeListHead();
    while (!element->IsUndefined(isolate)) {
      Code* code = Code::cast(element);
      DCHECK(code->kind() == Code::OPTIMIZED_FUNCTION);
      length++;
      element = code->next_code_link();
    }
    context = Context::cast(context)->next_context_link();
  }
  return length;
}

namespace {

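// Looks up the catch handler (if any) covering the deopt position of the
// given translated frame. Returns the handler's target offset and stores the
// auxiliary handler data in |data_out|; returns -1 if there is no handler.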
int LookupCatchHandler(TranslatedFrame* translated_frame, int* data_out) {
  switch (translated_frame->kind()) {
    case TranslatedFrame::kFunction: {
      BailoutId node_id = translated_frame->node_id();
      JSFunction* function =
          JSFunction::cast(translated_frame->begin()->GetRawValue());
      Code* non_optimized_code = function->shared()->code();
      FixedArray* raw_data = non_optimized_code->deoptimization_data();
      DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
      unsigned pc_and_state =
          Deoptimizer::GetOutputInfo(data, node_id, function->shared());
      unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
      HandlerTable* table =
          HandlerTable::cast(non_optimized_code->handler_table());
      HandlerTable::CatchPrediction prediction;
      return table->LookupRange(pc_offset, data_out, &prediction);
    }
    case TranslatedFrame::kInterpretedFunction: {
      int bytecode_offset = translated_frame->node_id().ToInt();
      JSFunction* function =
          JSFunction::cast(translated_frame->begin()->GetRawValue());
      BytecodeArray* bytecode = function->shared()->bytecode_array();
      HandlerTable* table = HandlerTable::cast(bytecode->handler_table());
      HandlerTable::CatchPrediction prediction;
      return table->LookupRange(bytecode_offset, data_out, &prediction);
    }
    default:
      break;
  }
  return -1;
}

}  // namespace

// We rely on this function not causing a GC.  It is called from generated code
// without having a real stack frame in place.
void Deoptimizer::DoComputeOutputFrames() {
  base::ElapsedTimer timer;

  // Determine basic deoptimization information.  The optimized frame is
  // described by the input data.
  DeoptimizationInputData* input_data =
      DeoptimizationInputData::cast(compiled_code_->deoptimization_data());

  {
    // Read caller's PC, caller's FP and caller's constant pool values
    // from input frame. Compute caller's frame top address.

    Register fp_reg = JavaScriptFrame::fp_register();
    stack_fp_ = input_->GetRegister(fp_reg.code());

    caller_frame_top_ = stack_fp_ + ComputeInputFrameAboveFpFixedSize();

    Address fp_address = input_->GetFramePointerAddress();
    caller_fp_ = Memory::intptr_at(fp_address);
    caller_pc_ =
        Memory::intptr_at(fp_address + CommonFrameConstants::kCallerPCOffset);
    input_frame_context_ = Memory::intptr_at(
        fp_address + CommonFrameConstants::kContextOrFrameTypeOffset);

    if (FLAG_enable_embedded_constant_pool) {
      caller_constant_pool_ = Memory::intptr_at(
          fp_address + CommonFrameConstants::kConstantPoolOffset);
    }
  }

  if (trace_scope_ != NULL) {
    timer.Start();
    PrintF(trace_scope_->file(), "[deoptimizing (DEOPT %s): begin ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " (opt #%d) @%d, FP to SP delta: %d, caller sp: 0x%08" V8PRIxPTR
           "]\n",
           input_data->OptimizationId()->value(), bailout_id_, fp_to_sp_delta_,
           caller_frame_top_);
    if (bailout_type_ == EAGER || bailout_type_ == SOFT ||
        (compiled_code_->is_hydrogen_stub())) {
      compiled_code_->PrintDeoptLocation(trace_scope_->file(), from_);
    }
  }

  BailoutId node_id = input_data->AstId(bailout_id_);
  ByteArray* translations = input_data->TranslationByteArray();
  unsigned translation_index =
      input_data->TranslationIndex(bailout_id_)->value();

  TranslationIterator state_iterator(translations, translation_index);
  translated_state_.Init(
      input_->GetFramePointerAddress(), &state_iterator,
      input_data->LiteralArray(), input_->GetRegisterValues(),
      trace_scope_ == nullptr ? nullptr : trace_scope_->file());

  // Do the input frame to output frame(s) translation.
  size_t count = translated_state_.frames().size();
  // If we are supposed to go to the catch handler, find the catching frame
  // for the catch and make sure we only deoptimize up to that frame.
  if (deoptimizing_throw_) {
    size_t catch_handler_frame_index = count;
    for (size_t i = count; i-- > 0;) {
      catch_handler_pc_offset_ = LookupCatchHandler(
          &(translated_state_.frames()[i]), &catch_handler_data_);
      if (catch_handler_pc_offset_ >= 0) {
        catch_handler_frame_index = i;
        break;
      }
    }
    CHECK_LT(catch_handler_frame_index, count);
    count = catch_handler_frame_index + 1;
  }

  DCHECK(output_ == NULL);
  output_ = new FrameDescription*[count];
  for (size_t i = 0; i < count; ++i) {
    output_[i] = NULL;
  }
  output_count_ = static_cast<int>(count);

  // Translate each output frame.
  int frame_index = 0;  // output_frame_index
  for (size_t i = 0; i < count; ++i, ++frame_index) {
    // Read the ast node id, function, and frame height for this output frame.
    TranslatedFrame* translated_frame = &(translated_state_.frames()[i]);
    switch (translated_frame->kind()) {
      case TranslatedFrame::kFunction:
        DoComputeJSFrame(translated_frame, frame_index,
                         deoptimizing_throw_ && i == count - 1);
        jsframe_count_++;
        break;
      case TranslatedFrame::kInterpretedFunction:
        DoComputeInterpretedFrame(translated_frame, frame_index,
                                  deoptimizing_throw_ && i == count - 1);
        jsframe_count_++;
        break;
      case TranslatedFrame::kArgumentsAdaptor:
        DoComputeArgumentsAdaptorFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kTailCallerFunction:
        DoComputeTailCallerFrame(translated_frame, frame_index);
        // Tail caller frame translations do not produce output frames.
        frame_index--;
        output_count_--;
        break;
      case TranslatedFrame::kConstructStub:
        DoComputeConstructStubFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kGetter:
        DoComputeAccessorStubFrame(translated_frame, frame_index, false);
        break;
      case TranslatedFrame::kSetter:
        DoComputeAccessorStubFrame(translated_frame, frame_index, true);
        break;
      case TranslatedFrame::kCompiledStub:
        DoComputeCompiledStubFrame(translated_frame, frame_index);
        break;
      case TranslatedFrame::kInvalid:
        FATAL("invalid frame");
        break;
    }
  }

  // Print some helpful diagnostic information.
  if (trace_scope_ != NULL) {
    double ms = timer.Elapsed().InMillisecondsF();
    int index = output_count_ - 1;  // Index of the topmost frame.
    PrintF(trace_scope_->file(), "[deoptimizing (%s): end ",
           MessageFor(bailout_type_));
    PrintFunctionName();
    PrintF(trace_scope_->file(),
           " @%d => node=%d, pc=0x%08" V8PRIxPTR ", caller sp=0x%08" V8PRIxPTR
           ", state=%s, took %0.3f ms]\n",
           bailout_id_, node_id.ToInt(), output_[index]->GetPc(),
           caller_frame_top_, BailoutStateToString(static_cast<BailoutState>(
                                  output_[index]->GetState()->value())),
           ms);
  }
}

void Deoptimizer::DoComputeJSFrame(TranslatedFrame* translated_frame,
                                   int frame_index, bool goto_catch_handler) {
  SharedFunctionInfo* shared = translated_frame->raw_shared_info();

  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  int input_index = 0;

  BailoutId node_id = translated_frame->node_id();
  unsigned height =
      translated_frame->height() - 1;  // Do not count the context.
  unsigned height_in_bytes = height * kPointerSize;
  if (goto_catch_handler) {
    // Take the stack height from the handler table.
    height = catch_handler_data_;
    // We also make space for the exception itself.
    height_in_bytes = (height + 1) * kPointerSize;
    CHECK(is_topmost);
  }

  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(), "  translating frame ");
    std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
    PrintF(trace_scope_->file(), "%s", name.get());
    PrintF(trace_scope_->file(), " => node=%d, height=%d%s\n", node_id.ToInt(),
           height_in_bytes, goto_catch_handler ? " (throw)" : "");
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by JavaScriptFrameConstants.
  unsigned fixed_frame_size = ComputeJavascriptFixedSize(shared);
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  int parameter_count = shared->internal_formal_parameter_count() + 1;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);

  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, and the function.  Synthesize their values and set them up
  // explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame.  For all subsequent output frames, it can be read from the
  // previous one.  This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame.  For all subsequent output frames, it can be
  // read from the previous one.  Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = JavaScriptFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  output_offset -= kPointerSize;

  // When deoptimizing into a catch block, we need to take the context
  // from just above the top of the operand stack (we push the context
  // at the entry of the try block).
  TranslatedFrame::iterator context_pos = value_iterator;
  int context_input_index = input_index;
  if (goto_catch_handler) {
    for (unsigned i = 0; i < height + 1; ++i) {
      context_pos++;
      context_input_index++;
    }
  }
  // Read the context from the translations.
  Object* context = context_pos->GetRawValue();
  if (context->IsUndefined(isolate_)) {
    // If the context was optimized away, just use the context from
    // the activation. This should only apply to Crankshaft code.
    CHECK(!compiled_code_->is_turbofanned());
    context = is_bottommost ? reinterpret_cast<Object*>(input_frame_context_)
                            : function->context();
  }
  value = reinterpret_cast<intptr_t>(context);
  output_frame->SetContext(value);
  WriteValueToOutput(context, context_input_index, frame_index, output_offset,
                     "context    ");
  if (context == isolate_->heap()->arguments_marker()) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    values_to_materialize_.push_back({output_address, context_pos});
  }
  value_iterator++;
  input_index++;

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");

  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // Translate the rest of the frame.
  for (unsigned i = 0; i < height; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }
  if (goto_catch_handler) {
    // Write out the exception for the catch handler.
    output_offset -= kPointerSize;
    Object* exception_obj = reinterpret_cast<Object*>(
        input_->GetRegister(FullCodeGenerator::result_register().code()));
    WriteValueToOutput(exception_obj, input_index, frame_index, output_offset,
                       "exception   ");
    input_index++;
  }
  CHECK_EQ(0u, output_offset);

  // Update constant pool.
  Code* non_optimized_code = shared->code();
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(non_optimized_code->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          JavaScriptFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Compute this frame's PC and state.
  FixedArray* raw_data = non_optimized_code->deoptimization_data();
  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
  Address start = non_optimized_code->instruction_start();
  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
  unsigned pc_offset = goto_catch_handler
                           ? catch_handler_pc_offset_
                           : FullCodeGenerator::PcField::decode(pc_and_state);
  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
  output_frame->SetPc(pc_value);

  // If we are going to the catch handler, then the exception lives in
  // the accumulator.
  BailoutState state =
      goto_catch_handler
          ? BailoutState::TOS_REGISTER
          : FullCodeGenerator::BailoutStateField::decode(pc_and_state);
  output_frame->SetState(Smi::FromInt(static_cast<int>(state)));

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
  }

  // Set the continuation for the topmost frame.
  if (is_topmost) {
    Builtins* builtins = isolate_->builtins();
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    if (bailout_type_ == LAZY) {
      continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
    } else if (bailout_type_ == SOFT) {
      continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
    } else {
      CHECK_EQ(bailout_type_, EAGER);
    }
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}

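// Like DoComputeJSFrame, but materializes an interpreted (bytecode) frame:
// besides the fixed slots it also sets up new.target, the bytecode array, the
// bytecode offset and, for the topmost frame, the accumulator.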
void Deoptimizer::DoComputeInterpretedFrame(TranslatedFrame* translated_frame,
                                            int frame_index,
                                            bool goto_catch_handler) {
  SharedFunctionInfo* shared = translated_frame->raw_shared_info();

  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  int input_index = 0;

  int bytecode_offset = translated_frame->node_id().ToInt();
  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;

  // All translations for interpreted frames contain the accumulator and hence
  // are assumed to be in bailout state {BailoutState::TOS_REGISTER}. However
  // such a state is only supported for the topmost frame. We need to skip
  // pushing the accumulator for any non-topmost frame.
  if (!is_topmost) height_in_bytes -= kPointerSize;

  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(), "  translating interpreted frame ");
    std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
    PrintF(trace_scope_->file(), "%s", name.get());
    PrintF(trace_scope_->file(), " => bytecode_offset=%d, height=%d%s\n",
           bytecode_offset, height_in_bytes,
           goto_catch_handler ? " (throw)" : "");
  }
  if (goto_catch_handler) {
    bytecode_offset = catch_handler_pc_offset_;
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by InterpreterFrameConstants.
  unsigned fixed_frame_size = ComputeInterpretedFixedSize(shared);
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  int parameter_count = shared->internal_formal_parameter_count() + 1;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  output_frame->SetFrameType(StackFrame::INTERPRETED);

  CHECK(frame_index >= 0 && frame_index < output_count_);
  CHECK_NULL(output_[frame_index]);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, the function, new.target and the bytecode offset.  Synthesize
  // their values and set them up explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame.  For all subsequent output frames, it can be read from the
  // previous one.  This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame.  For all subsequent output frames, it can be
  // read from the previous one.  Also compute and set this frame's frame
  // pointer.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (is_topmost) {
    Register fp_reg = InterpretedFrame::fp_register();
    output_frame->SetRegister(fp_reg.code(), fp_value);
  }
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // For the bottommost output frame the constant pool pointer can be gotten
    // from the input frame. For subsequent output frames, it can be read from
    // the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  output_offset -= kPointerSize;

  // When deoptimizing into a catch block, we need to take the context
  // from a register that was specified in the handler table.
  TranslatedFrame::iterator context_pos = value_iterator;
  int context_input_index = input_index;
  if (goto_catch_handler) {
    // Skip to the translated value of the register specified
    // in the handler table.
    for (int i = 0; i < catch_handler_data_ + 1; ++i) {
      context_pos++;
      context_input_index++;
    }
  }
  // Read the context from the translations.
  Object* context = context_pos->GetRawValue();
  value = reinterpret_cast<intptr_t>(context);
  output_frame->SetContext(value);
  WriteValueToOutput(context, context_input_index, frame_index, output_offset,
                     "context    ");
  if (context == isolate_->heap()->arguments_marker()) {
    Address output_address =
        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
        output_offset;
    values_to_materialize_.push_back({output_address, context_pos});
  }
  value_iterator++;
  input_index++;

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");

  // The new.target slot is only used during function activation, which is
  // before the first deopt point, so should never be needed. Just set it to
  // undefined.
  output_offset -= kPointerSize;
  Object* new_target = isolate_->heap()->undefined_value();
  WriteValueToOutput(new_target, 0, frame_index, output_offset, "new_target  ");

  // Set the bytecode array pointer.
  output_offset -= kPointerSize;
  Object* bytecode_array = shared->HasDebugInfo()
                               ? shared->GetDebugInfo()->DebugBytecodeArray()
                               : shared->bytecode_array();
  WriteValueToOutput(bytecode_array, 0, frame_index, output_offset,
                     "bytecode array ");

  // The bytecode offset was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  int raw_bytecode_offset =
      BytecodeArray::kHeaderSize - kHeapObjectTag + bytecode_offset;
  Smi* smi_bytecode_offset = Smi::FromInt(raw_bytecode_offset);
  WriteValueToOutput(smi_bytecode_offset, 0, frame_index, output_offset,
                     "bytecode offset ");

  if (trace_scope_ != nullptr) {
    PrintF(trace_scope_->file(), "    -------------------------\n");
  }

  // Translate the rest of the interpreter registers in the frame.
  for (unsigned i = 0; i < height - 1; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  // Translate the accumulator register (depending on frame position).
  if (is_topmost) {
    // For topmost frame, put the accumulator on the stack. The bailout state
    // for interpreted frames is always set to {BailoutState::TOS_REGISTER} and
    // the {NotifyDeoptimized} builtin pops it off the topmost frame (possibly
    // after materialization).
    output_offset -= kPointerSize;
    if (goto_catch_handler) {
      // If we are lazy deopting to a catch handler, we set the accumulator to
      // the exception (which lives in the result register).
      intptr_t accumulator_value =
          input_->GetRegister(FullCodeGenerator::result_register().code());
      WriteValueToOutput(reinterpret_cast<Object*>(accumulator_value), 0,
                         frame_index, output_offset, "accumulator ");
      value_iterator++;
    } else {
      WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                   output_offset, "accumulator ");
    }
  } else {
    // For non-topmost frames, skip the accumulator translation. For those
    // frames, the return value from the callee will become the accumulator.
    value_iterator++;
    input_index++;
  }
  CHECK_EQ(0u, output_offset);

  // Compute this frame's PC and state. The PC will be a special builtin that
  // continues the bytecode dispatch. Note that non-topmost and lazy-style
  // bailout handlers also advance the bytecode offset before dispatch, hence
  // simulating what normal handlers do upon completion of the operation.
  Builtins* builtins = isolate_->builtins();
  Code* dispatch_builtin =
      (!is_topmost || (bailout_type_ == LAZY)) && !goto_catch_handler
          ? builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance)
          : builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
  output_frame->SetPc(reinterpret_cast<intptr_t>(dispatch_builtin->entry()));
  // Restore accumulator (TOS) register.
  output_frame->SetState(
      Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));

  // Update constant pool.
  if (FLAG_enable_embedded_constant_pool) {
    intptr_t constant_pool_value =
        reinterpret_cast<intptr_t>(dispatch_builtin->constant_pool());
    output_frame->SetConstantPool(constant_pool_value);
    if (is_topmost) {
      Register constant_pool_reg =
          InterpretedFrame::constant_pool_pointer_register();
      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
    }
  }

  // Clear the context register. The context might be a de-materialized object
  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
  // safety we use Smi(0) instead of the potential {arguments_marker} here.
  if (is_topmost) {
    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
    Register context_reg = JavaScriptFrame::context_register();
    output_frame->SetRegister(context_reg.code(), context_value);
  }

  // Set the continuation for the topmost frame.
  if (is_topmost) {
    Code* continuation = builtins->builtin(Builtins::kNotifyDeoptimized);
    if (bailout_type_ == LAZY) {
      continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
    } else if (bailout_type_ == SOFT) {
      continuation = builtins->builtin(Builtins::kNotifySoftDeoptimized);
    } else {
      CHECK_EQ(bailout_type_, EAGER);
    }
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}

void Deoptimizer::DoComputeArgumentsAdaptorFrame(
    TranslatedFrame* translated_frame, int frame_index) {
  TranslatedFrame::iterator value_iterator = translated_frame->begin();
  bool is_bottommost = (0 == frame_index);
  int input_index = 0;

  unsigned height = translated_frame->height();
  unsigned height_in_bytes = height * kPointerSize;
  JSFunction* function = JSFunction::cast(value_iterator->GetRawValue());
  value_iterator++;
  input_index++;
  if (trace_scope_ != NULL) {
    PrintF(trace_scope_->file(),
           "  translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFixedFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  int parameter_count = height;
  FrameDescription* output_frame = new (output_frame_size)
      FrameDescription(output_frame_size, parameter_count);
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);

  // Arguments adaptor frames cannot be topmost.
  CHECK(frame_index < output_count_ - 1);
  CHECK(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous frame's top and
  // this frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    top_address = caller_frame_top_ - output_frame_size;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
                                 output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPCOnStackSize;
  intptr_t value;
  if (is_bottommost) {
    value = caller_pc_;
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetCallerPc(output_offset, value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's pc\n");

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kFPOnStackSize;
  if (is_bottommost) {
    value = caller_fp_;
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetCallerFp(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");

  if (FLAG_enable_embedded_constant_pool) {
    // Read the caller's constant pool from the previous frame.
    output_offset -= kPointerSize;
    if (is_bottommost) {
      value = caller_constant_pool_;
    } else {
      value = output_[frame_index - 1]->GetConstantPool();
    }
    output_frame->SetCallerConstantPool(output_offset, value);
    DebugPrintOutputSlot(value, frame_index, output_offset,
                         "caller's constant_pool\n");
  }
1403
1404  // A marker value is used in place of the context.
1405  output_offset -= kPointerSize;
1406  intptr_t context = reinterpret_cast<intptr_t>(
1407      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
1408  output_frame->SetFrameSlot(output_offset, context);
1409  DebugPrintOutputSlot(context, frame_index, output_offset,
1410                       "context (adaptor sentinel)\n");
1411
1412  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
1413  output_offset -= kPointerSize;
1414  value = reinterpret_cast<intptr_t>(function);
1415  WriteValueToOutput(function, 0, frame_index, output_offset, "function    ");
1416
1417  // Number of incoming arguments.
1418  output_offset -= kPointerSize;
1419  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
1420  output_frame->SetFrameSlot(output_offset, value);
1421  DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
1422  if (trace_scope_ != nullptr) {
1423    PrintF(trace_scope_->file(), "(%d)\n", height - 1);
1424  }
1425
1426  DCHECK(0 == output_offset);
1427
1428  Builtins* builtins = isolate_->builtins();
1429  Code* adaptor_trampoline =
1430      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
1431  intptr_t pc_value = reinterpret_cast<intptr_t>(
1432      adaptor_trampoline->instruction_start() +
1433      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
1434  output_frame->SetPc(pc_value);
1435  if (FLAG_enable_embedded_constant_pool) {
1436    intptr_t constant_pool_value =
1437        reinterpret_cast<intptr_t>(adaptor_trampoline->constant_pool());
1438    output_frame->SetConstantPool(constant_pool_value);
1439  }
1440}
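
// For reference, a sketch of the adaptor frame assembled above, derived
// directly from the writes performed in this function (the constant pool slot
// exists only when embedded constant pools are enabled). Slots are listed
// from the highest address down to the frame's top address:
//
//   [top + frame size - kPointerSize]  first translated parameter
//   ...                                remaining translated parameters
//                                      caller's PC
//                                      caller's FP       <- this frame's FP
//                                      (caller's constant pool)
//                                      ARGUMENTS_ADAPTOR sentinel
//                                      function
//   [top]                              argc as a Smi (receiver excluded)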
1441
1442void Deoptimizer::DoComputeTailCallerFrame(TranslatedFrame* translated_frame,
1443                                           int frame_index) {
1444  SharedFunctionInfo* shared = translated_frame->raw_shared_info();
1445
1446  bool is_bottommost = (0 == frame_index);
1447  // Tail caller frame can't be topmost.
1448  CHECK_NE(output_count_ - 1, frame_index);
1449
1450  if (trace_scope_ != NULL) {
1451    PrintF(trace_scope_->file(), "  translating tail caller frame ");
1452    std::unique_ptr<char[]> name = shared->DebugName()->ToCString();
1453    PrintF(trace_scope_->file(), "%s\n", name.get());
1454  }
1455
1456  if (!is_bottommost) return;
1457
1458  // Drop the arguments adaptor frame below the current frame if it exists.
1459  Address fp_address = input_->GetFramePointerAddress();
1460  Address adaptor_fp_address =
1461      Memory::Address_at(fp_address + CommonFrameConstants::kCallerFPOffset);
1462
1463  if (Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR) !=
1464      Memory::Object_at(adaptor_fp_address +
1465                        CommonFrameConstants::kContextOrFrameTypeOffset)) {
1466    return;
1467  }
1468
1469  int caller_params_count =
1470      Smi::cast(
1471          Memory::Object_at(adaptor_fp_address +
1472                            ArgumentsAdaptorFrameConstants::kLengthOffset))
1473          ->value();
1474
1475  int callee_params_count =
1476      function_->shared()->internal_formal_parameter_count();
1477
1478  // Neither parameter count (caller's or callee's) includes the receiver.
1479  int offset = (caller_params_count - callee_params_count) * kPointerSize;
1480  intptr_t new_stack_fp =
1481      reinterpret_cast<intptr_t>(adaptor_fp_address) + offset;
1482
1483  intptr_t new_caller_frame_top = new_stack_fp +
1484                                  (callee_params_count + 1) * kPointerSize +
1485                                  CommonFrameConstants::kFixedFrameSizeAboveFp;
1486
1487  intptr_t adaptor_caller_pc = Memory::intptr_at(
1488      adaptor_fp_address + CommonFrameConstants::kCallerPCOffset);
1489  intptr_t adaptor_caller_fp = Memory::intptr_at(
1490      adaptor_fp_address + CommonFrameConstants::kCallerFPOffset);
1491
1492  if (trace_scope_ != NULL) {
1493    PrintF(trace_scope_->file(),
1494           "    dropping caller arguments adaptor frame: offset=%d, "
1495           "fp: 0x%08" V8PRIxPTR " -> 0x%08" V8PRIxPTR
1496           ", "
1497           "caller sp: 0x%08" V8PRIxPTR " -> 0x%08" V8PRIxPTR "\n",
1498           offset, stack_fp_, new_stack_fp, caller_frame_top_,
1499           new_caller_frame_top);
1500  }
1501  caller_frame_top_ = new_caller_frame_top;
1502  caller_fp_ = adaptor_caller_fp;
1503  caller_pc_ = adaptor_caller_pc;
1504}
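
// Illustrative arithmetic for the frame dropping above (the counts are
// assumed, not taken from a real trace): with kPointerSize == 8, a caller
// that pushed 3 arguments through the adaptor and a callee declaring 1 formal
// parameter gives
//   offset               = (3 - 1) * 8 = 16,
//   new_stack_fp         = adaptor_fp_address + 16,
//   new_caller_frame_top = new_stack_fp + (1 + 1) * 8 +
//                          CommonFrameConstants::kFixedFrameSizeAboveFp,
// i.e. the adaptor's surplus argument slots are reclaimed and the caller's
// frame top is recomputed for the callee's own argument count (the "+ 1"
// accounts for the receiver).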
1505
1506void Deoptimizer::DoComputeConstructStubFrame(TranslatedFrame* translated_frame,
1507                                              int frame_index) {
1508  TranslatedFrame::iterator value_iterator = translated_frame->begin();
1509  bool is_topmost = (output_count_ - 1 == frame_index);
1510  // The construct frame could become topmost only if we inlined a constructor
1511  // call which does a tail call (otherwise the tail callee's frame would be
1512  // the topmost one). So it could only be the LAZY case.
1513  CHECK(!is_topmost || bailout_type_ == LAZY);
1514  int input_index = 0;
1515
1516  Builtins* builtins = isolate_->builtins();
1517  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
1518  unsigned height = translated_frame->height();
1519  unsigned height_in_bytes = height * kPointerSize;
1520
1521  // If the construct frame is topmost, we must ensure that the value of the
1522  // result register is preserved across continuation execution. We do this
1523  // here by "pushing" the result of the constructor function onto the top of
1524  // the reconstructed stack and then using the BailoutState::TOS_REGISTER
1525  // machinery.
1526  if (is_topmost) {
1527    height_in_bytes += kPointerSize;
1528  }
1529
1530  // Skip function.
1531  value_iterator++;
1532  input_index++;
1533  if (trace_scope_ != NULL) {
1534    PrintF(trace_scope_->file(),
1535           "  translating construct stub => height=%d\n", height_in_bytes);
1536  }
1537
1538  unsigned fixed_frame_size = ConstructFrameConstants::kFixedFrameSize;
1539  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1540
1541  // Allocate and store the output frame description.
1542  FrameDescription* output_frame =
1543      new (output_frame_size) FrameDescription(output_frame_size);
1544  output_frame->SetFrameType(StackFrame::CONSTRUCT);
1545
1546  // A construct stub frame cannot be the bottommost frame.
1547  DCHECK(frame_index > 0 && frame_index < output_count_);
1548  DCHECK(output_[frame_index] == NULL);
1549  output_[frame_index] = output_frame;
1550
1551  // The top address of the frame is computed from the previous frame's top and
1552  // this frame's size.
1553  intptr_t top_address;
1554  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1555  output_frame->SetTop(top_address);
1556
1557  // Compute the incoming parameter translation.
1558  int parameter_count = height;
1559  unsigned output_offset = output_frame_size;
1560  for (int i = 0; i < parameter_count; ++i) {
1561    output_offset -= kPointerSize;
1562    // The allocated receiver of a construct stub frame is passed as the
1563    // receiver parameter through the translation. It might encode a captured
1564    // object; in that case, override the slot address used for materialization.
1565    WriteTranslatedValueToOutput(
1566        &value_iterator, &input_index, frame_index, output_offset, nullptr,
1567        (i == 0) ? reinterpret_cast<Address>(top_address) : nullptr);
1568  }
1569
1570  // Read caller's PC from the previous frame.
1571  output_offset -= kPCOnStackSize;
1572  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1573  output_frame->SetCallerPc(output_offset, callers_pc);
1574  DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");
1575
1576  // Read caller's FP from the previous frame, and set this frame's FP.
1577  output_offset -= kFPOnStackSize;
1578  intptr_t value = output_[frame_index - 1]->GetFp();
1579  output_frame->SetCallerFp(output_offset, value);
1580  intptr_t fp_value = top_address + output_offset;
1581  output_frame->SetFp(fp_value);
1582  if (is_topmost) {
1583    Register fp_reg = JavaScriptFrame::fp_register();
1584    output_frame->SetRegister(fp_reg.code(), fp_value);
1585  }
1586  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
1587
1588  if (FLAG_enable_embedded_constant_pool) {
1589    // Read the caller's constant pool from the previous frame.
1590    output_offset -= kPointerSize;
1591    value = output_[frame_index - 1]->GetConstantPool();
1592    output_frame->SetCallerConstantPool(output_offset, value);
1593    DebugPrintOutputSlot(value, frame_index, output_offset,
1594                         "caller's constant_pool\n");
1595  }
1596
1597  // A marker value identifies the frame type.
1598  output_offset -= kPointerSize;
1599  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
1600  output_frame->SetFrameSlot(output_offset, value);
1601  DebugPrintOutputSlot(value, frame_index, output_offset,
1602                       "typed frame marker\n");
1603
1604  // The context is taken from the previous frame.
1605  output_offset -= kPointerSize;
1606  value = output_[frame_index - 1]->GetContext();
1607  output_frame->SetFrameSlot(output_offset, value);
1608  DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");
1609
1610  // Number of incoming arguments.
1611  output_offset -= kPointerSize;
1612  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
1613  output_frame->SetFrameSlot(output_offset, value);
1614  DebugPrintOutputSlot(value, frame_index, output_offset, "argc ");
1615  if (trace_scope_ != nullptr) {
1616    PrintF(trace_scope_->file(), "(%d)\n", height - 1);
1617  }
1618
1619  // The newly allocated object was passed as receiver in the artificial
1620  // constructor stub environment created by HEnvironment::CopyForInlining().
1621  output_offset -= kPointerSize;
1622  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
1623  output_frame->SetFrameSlot(output_offset, value);
1624  DebugPrintOutputSlot(value, frame_index, output_offset,
1625                       "allocated receiver\n");
1626
1627  if (is_topmost) {
1628    // Ensure the result is restored back when we return to the stub.
1629    output_offset -= kPointerSize;
1630    Register result_reg = FullCodeGenerator::result_register();
1631    value = input_->GetRegister(result_reg.code());
1632    output_frame->SetFrameSlot(output_offset, value);
1633    DebugPrintOutputSlot(value, frame_index, output_offset,
1634                         "constructor result\n");
1635
1636    output_frame->SetState(
1637        Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));
1638  }
1639
1640  CHECK_EQ(0u, output_offset);
1641
1642  intptr_t pc = reinterpret_cast<intptr_t>(
1643      construct_stub->instruction_start() +
1644      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
1645  output_frame->SetPc(pc);
1646  if (FLAG_enable_embedded_constant_pool) {
1647    intptr_t constant_pool_value =
1648        reinterpret_cast<intptr_t>(construct_stub->constant_pool());
1649    output_frame->SetConstantPool(constant_pool_value);
1650    if (is_topmost) {
1651      Register constant_pool_reg =
1652          JavaScriptFrame::constant_pool_pointer_register();
1653      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1654    }
1655  }
1656
1657  // Clear the context register. The context might be a de-materialized object
1658  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
1659  // safety we use Smi(0) instead of the potential {arguments_marker} here.
1660  if (is_topmost) {
1661    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
1662    Register context_reg = JavaScriptFrame::context_register();
1663    output_frame->SetRegister(context_reg.code(), context_value);
1664  }
1665
1666  // Set the continuation for the topmost frame.
1667  if (is_topmost) {
1668    Builtins* builtins = isolate_->builtins();
1669    DCHECK_EQ(LAZY, bailout_type_);
1670    Code* continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
1671    output_frame->SetContinuation(
1672        reinterpret_cast<intptr_t>(continuation->entry()));
1673  }
1674}
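
// Note on the result preservation above: when the construct frame is topmost,
// the extra slot pushed onto the reconstructed stack holds the constructor's
// result, and the BailoutState::TOS_REGISTER state signals that this
// top-of-stack value must end up back in the result register when execution
// resumes, so the caller observes the same value it would have seen without
// the deoptimization.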
1675
1676void Deoptimizer::DoComputeAccessorStubFrame(TranslatedFrame* translated_frame,
1677                                             int frame_index,
1678                                             bool is_setter_stub_frame) {
1679  TranslatedFrame::iterator value_iterator = translated_frame->begin();
1680  bool is_topmost = (output_count_ - 1 == frame_index);
1681  // The accessor frame could become topmost only if we inlined an accessor
1682  // call which does a tail call (otherwise the tail callee's frame would be
1683  // the topmost one). So it could only be the LAZY case.
1684  CHECK(!is_topmost || bailout_type_ == LAZY);
1685  int input_index = 0;
1686
1687  // Skip accessor.
1688  value_iterator++;
1689  input_index++;
1690  // The receiver (and the implicit return value, if any) are expected in
1691  // registers by the LoadIC/StoreIC, so they don't belong to the output stack
1692  // frame. This means that we have to use a height of 0.
1693  unsigned height = 0;
1694  unsigned height_in_bytes = height * kPointerSize;
1695
1696  // If the accessor frame is topmost, we must ensure that the value of the
1697  // result register is preserved across continuation execution. We do this
1698  // here by "pushing" the result of the accessor function onto the top of
1699  // the reconstructed stack and then using the BailoutState::TOS_REGISTER
1700  // machinery.
1701  // We don't need to restore the result for a setter call, because the value
1702  // to return is the stored value, not the result of the setter function.
1703  bool should_preserve_result = is_topmost && !is_setter_stub_frame;
1704  if (should_preserve_result) {
1705    height_in_bytes += kPointerSize;
1706  }
1707
1708  const char* kind = is_setter_stub_frame ? "setter" : "getter";
1709  if (trace_scope_ != NULL) {
1710    PrintF(trace_scope_->file(),
1711           "  translating %s stub => height=%u\n", kind, height_in_bytes);
1712  }
1713
1714  // We need one stack entry for the return address and enough entries for the
1715  // StackFrame::INTERNAL (FP, frame type, context, code object and constant
1716  // pool (if enabled); see MacroAssembler::EnterFrame).
1717  // For a setter stub frame we need one additional entry for the implicit
1718  // return value, see StoreStubCompiler::CompileStoreViaSetter.
1719  unsigned fixed_frame_entries =
1720      (StandardFrameConstants::kFixedFrameSize / kPointerSize) + 1 +
1721      (is_setter_stub_frame ? 1 : 0);
1722  unsigned fixed_frame_size = fixed_frame_entries * kPointerSize;
1723  unsigned output_frame_size = height_in_bytes + fixed_frame_size;
1724
1725  // Allocate and store the output frame description.
1726  FrameDescription* output_frame =
1727      new (output_frame_size) FrameDescription(output_frame_size);
1728  output_frame->SetFrameType(StackFrame::INTERNAL);
1729
1730  // A frame for an accessor stub cannot be the bottommost frame.
1731  CHECK(frame_index > 0 && frame_index < output_count_);
1732  CHECK_NULL(output_[frame_index]);
1733  output_[frame_index] = output_frame;
1734
1735  // The top address of the frame is computed from the previous frame's top and
1736  // this frame's size.
1737  intptr_t top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
1738  output_frame->SetTop(top_address);
1739
1740  unsigned output_offset = output_frame_size;
1741
1742  // Read caller's PC from the previous frame.
1743  output_offset -= kPCOnStackSize;
1744  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
1745  output_frame->SetCallerPc(output_offset, callers_pc);
1746  DebugPrintOutputSlot(callers_pc, frame_index, output_offset, "caller's pc\n");
1747
1748  // Read caller's FP from the previous frame, and set this frame's FP.
1749  output_offset -= kFPOnStackSize;
1750  intptr_t value = output_[frame_index - 1]->GetFp();
1751  output_frame->SetCallerFp(output_offset, value);
1752  intptr_t fp_value = top_address + output_offset;
1753  output_frame->SetFp(fp_value);
1754  if (is_topmost) {
1755    Register fp_reg = JavaScriptFrame::fp_register();
1756    output_frame->SetRegister(fp_reg.code(), fp_value);
1757  }
1758  DebugPrintOutputSlot(value, frame_index, output_offset, "caller's fp\n");
1759
1760  if (FLAG_enable_embedded_constant_pool) {
1761    // Read the caller's constant pool from the previous frame.
1762    output_offset -= kPointerSize;
1763    value = output_[frame_index - 1]->GetConstantPool();
1764    output_frame->SetCallerConstantPool(output_offset, value);
1765    DebugPrintOutputSlot(value, frame_index, output_offset,
1766                         "caller's constant_pool\n");
1767  }
1768
1769  // Set the frame type.
1770  output_offset -= kPointerSize;
1771  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::INTERNAL));
1772  output_frame->SetFrameSlot(output_offset, value);
1773  DebugPrintOutputSlot(value, frame_index, output_offset, "frame type ");
1774  if (trace_scope_ != nullptr) {
1775    PrintF(trace_scope_->file(), "(%s sentinel)\n", kind);
1776  }
1777
1778  // Get Code object from accessor stub.
1779  output_offset -= kPointerSize;
1780  Builtins::Name name = is_setter_stub_frame ?
1781      Builtins::kStoreIC_Setter_ForDeopt :
1782      Builtins::kLoadIC_Getter_ForDeopt;
1783  Code* accessor_stub = isolate_->builtins()->builtin(name);
1784  value = reinterpret_cast<intptr_t>(accessor_stub);
1785  output_frame->SetFrameSlot(output_offset, value);
1786  DebugPrintOutputSlot(value, frame_index, output_offset, "code object\n");
1787
1788  // The context is taken from the previous frame.
1789  output_offset -= kPointerSize;
1790  value = output_[frame_index - 1]->GetContext();
1791  output_frame->SetFrameSlot(output_offset, value);
1792  DebugPrintOutputSlot(value, frame_index, output_offset, "context\n");
1793
1794  // Skip receiver.
1795  value_iterator++;
1796  input_index++;
1797
1798  if (is_setter_stub_frame) {
1799    // The implicit return value was part of the artificial setter stub
1800    // environment.
1801    output_offset -= kPointerSize;
1802    WriteTranslatedValueToOutput(&value_iterator, &input_index, frame_index,
1803                                 output_offset);
1804  }
1805
1806  if (should_preserve_result) {
1807    // Ensure the result is restored back when we return to the stub.
1808    output_offset -= kPointerSize;
1809    Register result_reg = FullCodeGenerator::result_register();
1810    value = input_->GetRegister(result_reg.code());
1811    output_frame->SetFrameSlot(output_offset, value);
1812    DebugPrintOutputSlot(value, frame_index, output_offset,
1813                         "accessor result\n");
1814
1815    output_frame->SetState(
1816        Smi::FromInt(static_cast<int>(BailoutState::TOS_REGISTER)));
1817  } else {
1818    output_frame->SetState(
1819        Smi::FromInt(static_cast<int>(BailoutState::NO_REGISTERS)));
1820  }
1821
1822  CHECK_EQ(0u, output_offset);
1823
1824  Smi* offset = is_setter_stub_frame ?
1825      isolate_->heap()->setter_stub_deopt_pc_offset() :
1826      isolate_->heap()->getter_stub_deopt_pc_offset();
1827  intptr_t pc = reinterpret_cast<intptr_t>(
1828      accessor_stub->instruction_start() + offset->value());
1829  output_frame->SetPc(pc);
1830  if (FLAG_enable_embedded_constant_pool) {
1831    intptr_t constant_pool_value =
1832        reinterpret_cast<intptr_t>(accessor_stub->constant_pool());
1833    output_frame->SetConstantPool(constant_pool_value);
1834    if (is_topmost) {
1835      Register constant_pool_reg =
1836          JavaScriptFrame::constant_pool_pointer_register();
1837      output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
1838    }
1839  }
1840
1841  // Clear the context register. The context might be a de-materialized object
1842  // and will be materialized by {Runtime_NotifyDeoptimized}. For additional
1843  // safety we use Smi(0) instead of the potential {arguments_marker} here.
1844  if (is_topmost) {
1845    intptr_t context_value = reinterpret_cast<intptr_t>(Smi::kZero);
1846    Register context_reg = JavaScriptFrame::context_register();
1847    output_frame->SetRegister(context_reg.code(), context_value);
1848  }
1849
1850  // Set the continuation for the topmost frame.
1851  if (is_topmost) {
1852    Builtins* builtins = isolate_->builtins();
1853    DCHECK_EQ(LAZY, bailout_type_);
1854    Code* continuation = builtins->builtin(Builtins::kNotifyLazyDeoptimized);
1855    output_frame->SetContinuation(
1856        reinterpret_cast<intptr_t>(continuation->entry()));
1857  }
1858}
1859
1860void Deoptimizer::DoComputeCompiledStubFrame(TranslatedFrame* translated_frame,
1861                                             int frame_index) {
1862  //
1863  //               FROM                                  TO
1864  //    |          ....           |          |          ....           |
1865  //    +-------------------------+          +-------------------------+
1866  //    | JSFunction continuation |          | JSFunction continuation |
1867  //    +-------------------------+          +-------------------------+
1868  // |  |    saved frame (FP)     |          |    saved frame (FP)     |
1869  // |  +=========================+<-fpreg   +=========================+<-fpreg
1870  // |  |constant pool (if ool_cp)|          |constant pool (if ool_cp)|
1871  // |  +-------------------------+          +-------------------------+
1872  // |  |   JSFunction context    |          |   JSFunction context    |
1873  // v  +-------------------------+          +-------------------------+
1874  //    |   COMPILED_STUB marker  |          |   STUB_FAILURE marker   |
1875  //    +-------------------------+          +-------------------------+
1876  //    |                         |          |  caller args.arguments_ |
1877  //    | ...                     |          +-------------------------+
1878  //    |                         |          |  caller args.length_    |
1879  //    |-------------------------|<-spreg   +-------------------------+
1880  //                                         |  caller args pointer    |
1881  //                                         +-------------------------+
1882  //                                         |  caller stack param 1   |
1883  //      parameters in registers            +-------------------------+
1884  //       and spilled to stack              |           ....          |
1885  //                                         +-------------------------+
1886  //                                         |  caller stack param n   |
1887  //                                         +-------------------------+<-spreg
1888  //                                         reg = number of parameters
1889  //                                         reg = failure handler address
1890  //                                         reg = saved frame
1891  //                                         reg = JSFunction context
1892  //
1893  // Caller stack params contain the register parameters to the stub first,
1894  // and then, if the descriptor specifies a constant number of stack
1895  // parameters, the stack parameters as well.
1896
1897  TranslatedFrame::iterator value_iterator = translated_frame->begin();
1898  int input_index = 0;
1899
1900  CHECK(compiled_code_->is_hydrogen_stub());
1901  int major_key = CodeStub::GetMajorKey(compiled_code_);
1902  CodeStubDescriptor descriptor(isolate_, compiled_code_->stub_key());
1903
1904  // The output frame must have room for all pushed register parameters
1905  // and the standard stack frame slots.  Include space for an argument
1906  // object to the callee and optionally the space to pass the argument
1907  // object to the stub failure handler.
1908  int param_count = descriptor.GetRegisterParameterCount();
1909  int stack_param_count = descriptor.GetStackParameterCount();
1910  // The translated frame contains all of the register parameters
1911  // plus the context.
1912  CHECK_EQ(translated_frame->height(), param_count + 1);
1913  CHECK_GE(param_count, 0);
1914
1915  int height_in_bytes = kPointerSize * (param_count + stack_param_count);
1916  int fixed_frame_size = StubFailureTrampolineFrameConstants::kFixedFrameSize;
1917  int output_frame_size = height_in_bytes + fixed_frame_size;
1918  if (trace_scope_ != NULL) {
1919    PrintF(trace_scope_->file(),
1920           "  translating %s => StubFailureTrampolineStub, height=%d\n",
1921           CodeStub::MajorName(static_cast<CodeStub::Major>(major_key)),
1922           height_in_bytes);
1923  }
1924
1925  // The stub failure trampoline is a single frame.
1926  FrameDescription* output_frame =
1927      new (output_frame_size) FrameDescription(output_frame_size);
1928  output_frame->SetFrameType(StackFrame::STUB_FAILURE_TRAMPOLINE);
1929  CHECK_EQ(frame_index, 0);
1930  output_[frame_index] = output_frame;
1931
1932  // The top address of the frame is computed from the previous frame's top and
1933  // this frame's size.
1934  intptr_t top_address = caller_frame_top_ - output_frame_size;
1935  output_frame->SetTop(top_address);
1936
1937  // Set caller's PC (JSFunction continuation).
1938  unsigned output_frame_offset = output_frame_size - kFPOnStackSize;
1939  intptr_t value = caller_pc_;
1940  output_frame->SetCallerPc(output_frame_offset, value);
1941  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1942                       "caller's pc\n");
1943
1944  // Read caller's FP from the input frame, and set this frame's FP.
1945  value = caller_fp_;
1946  output_frame_offset -= kFPOnStackSize;
1947  output_frame->SetCallerFp(output_frame_offset, value);
1948  intptr_t frame_ptr = top_address + output_frame_offset;
1949  Register fp_reg = StubFailureTrampolineFrame::fp_register();
1950  output_frame->SetRegister(fp_reg.code(), frame_ptr);
1951  output_frame->SetFp(frame_ptr);
1952  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1953                       "caller's fp\n");
1954
1955  if (FLAG_enable_embedded_constant_pool) {
1956    // Read the caller's constant pool from the input frame.
1957    value = caller_constant_pool_;
1958    output_frame_offset -= kPointerSize;
1959    output_frame->SetCallerConstantPool(output_frame_offset, value);
1960    DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1961                         "caller's constant_pool\n");
1962  }
1963
1964  // The marker for the typed stack frame
1965  output_frame_offset -= kPointerSize;
1966  value = reinterpret_cast<intptr_t>(
1967      Smi::FromInt(StackFrame::STUB_FAILURE_TRAMPOLINE));
1968  output_frame->SetFrameSlot(output_frame_offset, value);
1969  DebugPrintOutputSlot(value, frame_index, output_frame_offset,
1970                       "function (stub failure sentinel)\n");
1971
1972  intptr_t caller_arg_count = stack_param_count;
1973  bool arg_count_known = !descriptor.stack_parameter_count().is_valid();
1974
1975  // Build the Arguments object for the caller's parameters and a pointer to it.
1976  output_frame_offset -= kPointerSize;
1977  int args_arguments_offset = output_frame_offset;
1978  intptr_t the_hole = reinterpret_cast<intptr_t>(
1979      isolate_->heap()->the_hole_value());
1980  if (arg_count_known) {
1981    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
1982        (caller_arg_count - 1) * kPointerSize;
1983  } else {
1984    value = the_hole;
1985  }
1986
1987  output_frame->SetFrameSlot(args_arguments_offset, value);
1988  DebugPrintOutputSlot(
1989      value, frame_index, args_arguments_offset,
1990      arg_count_known ? "args.arguments\n" : "args.arguments (the hole)\n");
1991
1992  output_frame_offset -= kPointerSize;
1993  int length_frame_offset = output_frame_offset;
1994  value = arg_count_known ? caller_arg_count : the_hole;
1995  output_frame->SetFrameSlot(length_frame_offset, value);
1996  DebugPrintOutputSlot(
1997      value, frame_index, length_frame_offset,
1998      arg_count_known ? "args.length\n" : "args.length (the hole)\n");
1999
2000  output_frame_offset -= kPointerSize;
2001  value = frame_ptr + StandardFrameConstants::kCallerSPOffset -
2002      (output_frame_size - output_frame_offset) + kPointerSize;
2003  output_frame->SetFrameSlot(output_frame_offset, value);
2004  DebugPrintOutputSlot(value, frame_index, output_frame_offset, "args*\n");
2005
2006  // Copy the register parameters to the failure frame.
2007  int arguments_length_offset = -1;
2008  for (int i = 0; i < param_count; ++i) {
2009    output_frame_offset -= kPointerSize;
2010    WriteTranslatedValueToOutput(&value_iterator, &input_index, 0,
2011                                 output_frame_offset);
2012
2013    if (!arg_count_known &&
2014        descriptor.GetRegisterParameter(i)
2015            .is(descriptor.stack_parameter_count())) {
2016      arguments_length_offset = output_frame_offset;
2017    }
2018  }
2019
2020  Object* maybe_context = value_iterator->GetRawValue();
2021  CHECK(maybe_context->IsContext());
2022  Register context_reg = StubFailureTrampolineFrame::context_register();
2023  value = reinterpret_cast<intptr_t>(maybe_context);
2024  output_frame->SetRegister(context_reg.code(), value);
2025  ++value_iterator;
2026
2027  // Copy constant stack parameters to the failure frame. If the number of stack
2028  // parameters is not known in the descriptor, the arguments object is the way
2029  // to access them.
2030  for (int i = 0; i < stack_param_count; i++) {
2031    output_frame_offset -= kPointerSize;
2032    Object** stack_parameter = reinterpret_cast<Object**>(
2033        frame_ptr + StandardFrameConstants::kCallerSPOffset +
2034        (stack_param_count - i - 1) * kPointerSize);
2035    value = reinterpret_cast<intptr_t>(*stack_parameter);
2036    output_frame->SetFrameSlot(output_frame_offset, value);
2037    DebugPrintOutputSlot(value, frame_index, output_frame_offset,
2038                         "stack parameter\n");
2039  }
2040
2041  CHECK_EQ(0u, output_frame_offset);
2042
2043  if (!arg_count_known) {
2044    CHECK_GE(arguments_length_offset, 0);
2045    // We know it's a smi because 1) the code stub guarantees the stack
2046    // parameter count is in smi range, and 2) the translation in the parameter
2047    // loop above wrote it to the frame as a tagged value.
2048    Smi* smi_caller_arg_count = reinterpret_cast<Smi*>(
2049        output_frame->GetFrameSlot(arguments_length_offset));
2050    caller_arg_count = smi_caller_arg_count->value();
2051    output_frame->SetFrameSlot(length_frame_offset, caller_arg_count);
2052    DebugPrintOutputSlot(caller_arg_count, frame_index, length_frame_offset,
2053                         "args.length\n");
2054    value = frame_ptr + StandardFrameConstants::kCallerSPOffset +
2055        (caller_arg_count - 1) * kPointerSize;
2056    output_frame->SetFrameSlot(args_arguments_offset, value);
2057    DebugPrintOutputSlot(value, frame_index, args_arguments_offset,
2058                         "args.arguments");
2059  }
2060
2061  // Copy the double registers from the input into the output frame.
2062  CopyDoubleRegisters(output_frame);
2063
2064  // Fill registers containing handler and number of parameters.
2065  SetPlatformCompiledStubRegisters(output_frame, &descriptor);
2066
2067  // Compute this frame's PC, state, and continuation.
2068  Code* trampoline = NULL;
2069  StubFunctionMode function_mode = descriptor.function_mode();
2070  StubFailureTrampolineStub(isolate_, function_mode)
2071      .FindCodeInCache(&trampoline);
2072  DCHECK(trampoline != NULL);
2073  output_frame->SetPc(reinterpret_cast<intptr_t>(
2074      trampoline->instruction_start()));
2075  if (FLAG_enable_embedded_constant_pool) {
2076    Register constant_pool_reg =
2077        StubFailureTrampolineFrame::constant_pool_pointer_register();
2078    intptr_t constant_pool_value =
2079        reinterpret_cast<intptr_t>(trampoline->constant_pool());
2080    output_frame->SetConstantPool(constant_pool_value);
2081    output_frame->SetRegister(constant_pool_reg.code(), constant_pool_value);
2082  }
2083  output_frame->SetState(
2084      Smi::FromInt(static_cast<int>(BailoutState::NO_REGISTERS)));
2085  Code* notify_failure =
2086      isolate_->builtins()->builtin(Builtins::kNotifyStubFailureSaveDoubles);
2087  output_frame->SetContinuation(
2088      reinterpret_cast<intptr_t>(notify_failure->entry()));
2089}
2090
2091
2092void Deoptimizer::MaterializeHeapObjects(JavaScriptFrameIterator* it) {
2093  // Walk to the last JavaScript output frame to find out if it has
2094  // adapted arguments.
2095  for (int frame_index = 0; frame_index < jsframe_count(); ++frame_index) {
2096    if (frame_index != 0) it->Advance();
2097  }
2098  translated_state_.Prepare(it->frame()->has_adapted_arguments(),
2099                            reinterpret_cast<Address>(stack_fp_));
2100
2101  for (auto& materialization : values_to_materialize_) {
2102    Handle<Object> value = materialization.value_->GetValue();
2103
2104    if (trace_scope_ != nullptr) {
2105      PrintF("Materialization [0x%08" V8PRIxPTR "] <- 0x%08" V8PRIxPTR " ;  ",
2106             reinterpret_cast<intptr_t>(materialization.output_slot_address_),
2107             reinterpret_cast<intptr_t>(*value));
2108      value->ShortPrint(trace_scope_->file());
2109      PrintF(trace_scope_->file(), "\n");
2110    }
2111
2112    *(reinterpret_cast<intptr_t*>(materialization.output_slot_address_)) =
2113        reinterpret_cast<intptr_t>(*value);
2114  }
2115
2116  isolate_->materialized_object_store()->Remove(
2117      reinterpret_cast<Address>(stack_fp_));
2118}
2119
2120
2121void Deoptimizer::WriteTranslatedValueToOutput(
2122    TranslatedFrame::iterator* iterator, int* input_index, int frame_index,
2123    unsigned output_offset, const char* debug_hint_string,
2124    Address output_address_for_materialization) {
2125  Object* value = (*iterator)->GetRawValue();
2126
2127  WriteValueToOutput(value, *input_index, frame_index, output_offset,
2128                     debug_hint_string);
2129
2130  if (value == isolate_->heap()->arguments_marker()) {
2131    Address output_address =
2132        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
2133        output_offset;
2134    if (output_address_for_materialization == nullptr) {
2135      output_address_for_materialization = output_address;
2136    }
2137    values_to_materialize_.push_back(
2138        {output_address_for_materialization, *iterator});
2139  }
2140
2141  (*iterator)++;
2142  (*input_index)++;
2143}
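
// A note on the arguments_marker path above: slots whose value is not yet
// available are first filled with the marker, and the (slot address, iterator)
// pair is queued in values_to_materialize_. MaterializeHeapObjects later
// resolves each queued iterator and patches the recorded slot, roughly
// (sketch, with `m` standing for one queued entry):
//
//   Handle<Object> value = m.value_->GetValue();
//   *reinterpret_cast<intptr_t*>(m.output_slot_address_) =
//       reinterpret_cast<intptr_t>(*value);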
2144
2145
2146void Deoptimizer::WriteValueToOutput(Object* value, int input_index,
2147                                     int frame_index, unsigned output_offset,
2148                                     const char* debug_hint_string) {
2149  output_[frame_index]->SetFrameSlot(output_offset,
2150                                     reinterpret_cast<intptr_t>(value));
2151
2152  if (trace_scope_ != nullptr) {
2153    DebugPrintOutputSlot(reinterpret_cast<intptr_t>(value), frame_index,
2154                         output_offset, debug_hint_string);
2155    value->ShortPrint(trace_scope_->file());
2156    PrintF(trace_scope_->file(), "  (input #%d)\n", input_index);
2157  }
2158}
2159
2160
2161void Deoptimizer::DebugPrintOutputSlot(intptr_t value, int frame_index,
2162                                       unsigned output_offset,
2163                                       const char* debug_hint_string) {
2164  if (trace_scope_ != nullptr) {
2165    Address output_address =
2166        reinterpret_cast<Address>(output_[frame_index]->GetTop()) +
2167        output_offset;
2168    PrintF(trace_scope_->file(),
2169           "    0x%08" V8PRIxPTR ": [top + %d] <- 0x%08" V8PRIxPTR " ;  %s",
2170           reinterpret_cast<intptr_t>(output_address), output_offset, value,
2171           debug_hint_string == nullptr ? "" : debug_hint_string);
2172  }
2173}
2174
2175unsigned Deoptimizer::ComputeInputFrameAboveFpFixedSize() const {
2176  unsigned fixed_size = CommonFrameConstants::kFixedFrameSizeAboveFp;
2177  if (!function_->IsSmi()) {
2178    fixed_size += ComputeIncomingArgumentSize(function_->shared());
2179  }
2180  return fixed_size;
2181}
2182
2183unsigned Deoptimizer::ComputeInputFrameSize() const {
2184  // The fp-to-sp delta already takes the context, constant pool pointer and the
2185  // function into account so we have to avoid double counting them.
2186  unsigned fixed_size_above_fp = ComputeInputFrameAboveFpFixedSize();
2187  unsigned result = fixed_size_above_fp + fp_to_sp_delta_;
2188  if (compiled_code_->kind() == Code::OPTIMIZED_FUNCTION) {
2189    unsigned stack_slots = compiled_code_->stack_slots();
2190    unsigned outgoing_size =
2191        ComputeOutgoingArgumentSize(compiled_code_, bailout_id_);
2192    CHECK_EQ(fixed_size_above_fp + (stack_slots * kPointerSize) -
2193                 CommonFrameConstants::kFixedFrameSizeAboveFp + outgoing_size,
2194             result);
2195  }
2196  return result;
2197}
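
// Illustrative instance of the consistency CHECK above (all numbers assumed):
// with kPointerSize == 8 and a function declaring 2 formal parameters,
//   fixed_size_above_fp = kFixedFrameSizeAboveFp + (2 + 1) * 8
// (the receiver is included). If the optimized code uses 10 stack slots and
// the bailout site records 4 outgoing arguments, then
//   result = fixed_size_above_fp + fp_to_sp_delta_
// must equal
//   fixed_size_above_fp + 10 * 8 - kFixedFrameSizeAboveFp + 4 * 8,
// which is exactly what the CHECK_EQ verifies.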
2198
2199// static
2200unsigned Deoptimizer::ComputeJavascriptFixedSize(SharedFunctionInfo* shared) {
2201  // The fixed part of the frame consists of the return address, frame
2202  // pointer, function, context, and all the incoming arguments.
2203  return ComputeIncomingArgumentSize(shared) +
2204         StandardFrameConstants::kFixedFrameSize;
2205}
2206
2207// static
2208unsigned Deoptimizer::ComputeInterpretedFixedSize(SharedFunctionInfo* shared) {
2209  // The fixed part of the frame consists of the return address, frame
2210  // pointer, function, context, new.target, bytecode offset and all the
2211  // incoming arguments.
2212  return ComputeIncomingArgumentSize(shared) +
2213         InterpreterFrameConstants::kFixedFrameSize;
2214}
2215
2216// static
2217unsigned Deoptimizer::ComputeIncomingArgumentSize(SharedFunctionInfo* shared) {
2218  return (shared->internal_formal_parameter_count() + 1) * kPointerSize;
2219}
2220
2221
2222// static
2223unsigned Deoptimizer::ComputeOutgoingArgumentSize(Code* code,
2224                                                  unsigned bailout_id) {
2225  DeoptimizationInputData* data =
2226      DeoptimizationInputData::cast(code->deoptimization_data());
2227  unsigned height = data->ArgumentsStackHeight(bailout_id)->value();
2228  return height * kPointerSize;
2229}
2230
2231void Deoptimizer::EnsureCodeForDeoptimizationEntry(Isolate* isolate,
2232                                                   BailoutType type,
2233                                                   int max_entry_id) {
2234  // We cannot run this if the serializer is enabled because this will
2235  // cause us to emit relocation information for the external
2236  // references. This is fine because the deoptimizer's code section
2237  // isn't meant to be serialized at all.
2238  CHECK(type == EAGER || type == SOFT || type == LAZY);
2239  DeoptimizerData* data = isolate->deoptimizer_data();
2240  int entry_count = data->deopt_entry_code_entries_[type];
2241  if (max_entry_id < entry_count) return;
2242  entry_count = Max(entry_count, Deoptimizer::kMinNumberOfEntries);
2243  while (max_entry_id >= entry_count) entry_count *= 2;
2244  CHECK(entry_count <= Deoptimizer::kMaxNumberOfEntries);
2245
2246  MacroAssembler masm(isolate, NULL, 16 * KB, CodeObjectRequired::kYes);
2247  masm.set_emit_debug_code(false);
2248  GenerateDeoptimizationEntries(&masm, entry_count, type);
2249  CodeDesc desc;
2250  masm.GetCode(&desc);
2251  DCHECK(!RelocInfo::RequiresRelocation(desc));
2252
2253  MemoryChunk* chunk = data->deopt_entry_code_[type];
2254  CHECK(static_cast<int>(Deoptimizer::GetMaxDeoptTableSize()) >=
2255        desc.instr_size);
2256  if (!chunk->CommitArea(desc.instr_size)) {
2257    V8::FatalProcessOutOfMemory(
2258        "Deoptimizer::EnsureCodeForDeoptimizationEntry");
2259  }
2260  CopyBytes(chunk->area_start(), desc.buffer,
2261            static_cast<size_t>(desc.instr_size));
2262  Assembler::FlushICache(isolate, chunk->area_start(), desc.instr_size);
2263
2264  data->deopt_entry_code_entries_[type] = entry_count;
2265}
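
// Illustrative growth of the entry table above (counts assumed): if the table
// currently covers 64 entries and a bailout with max_entry_id == 130 arrives,
// entry_count doubles 64 -> 128 -> 256, the entry code is regenerated for 256
// ids into the committed chunk, and 256 is stored in
// deopt_entry_code_entries_[type] so later requests for smaller ids return
// immediately.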
2266
2267FrameDescription::FrameDescription(uint32_t frame_size, int parameter_count)
2268    : frame_size_(frame_size),
2269      parameter_count_(parameter_count),
2270      top_(kZapUint32),
2271      pc_(kZapUint32),
2272      fp_(kZapUint32),
2273      context_(kZapUint32),
2274      constant_pool_(kZapUint32) {
2275  // Zap all the registers.
2276  for (int r = 0; r < Register::kNumRegisters; r++) {
2277    // TODO(jbramley): It isn't safe to use kZapUint32 here. If the register
2278    // isn't used before the next safepoint, the GC will try to scan it as a
2279    // tagged value. kZapUint32 looks like a valid tagged pointer, but it isn't.
2280    SetRegister(r, kZapUint32);
2281  }
2282
2283  // Zap all the slots.
2284  for (unsigned o = 0; o < frame_size; o += kPointerSize) {
2285    SetFrameSlot(o, kZapUint32);
2286  }
2287}
2288
2289void TranslationBuffer::Add(int32_t value) {
2290  // This wouldn't handle kMinInt correctly if it ever encountered it.
2291  DCHECK(value != kMinInt);
2292  // Encode the sign bit in the least significant bit.
2293  bool is_negative = (value < 0);
2294  uint32_t bits = ((is_negative ? -value : value) << 1) |
2295      static_cast<int32_t>(is_negative);
2296  // Encode the individual bytes using the least significant bit of
2297  // each byte to indicate whether or not more bytes follow.
2298  do {
2299    uint32_t next = bits >> 7;
2300    contents_.push_back(((bits << 1) & 0xFF) | (next != 0));
2301    bits = next;
2302  } while (bits != 0);
2303}
2304
2305
2306int32_t TranslationIterator::Next() {
2307  // Run through the bytes until we reach one with a least significant
2308  // bit of zero (marks the end).
2309  uint32_t bits = 0;
2310  for (int i = 0; true; i += 7) {
2311    DCHECK(HasNext());
2312    uint8_t next = buffer_->get(index_++);
2313    bits |= (next >> 1) << i;
2314    if ((next & 1) == 0) break;
2315  }
2316  // The bits encode the sign in the least significant bit.
2317  bool is_negative = (bits & 1) == 1;
2318  int32_t result = bits >> 1;
2319  return is_negative ? -result : result;
2320}
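
// A minimal standalone sketch of the encoding written by TranslationBuffer::Add
// and read back by TranslationIterator::Next above. It is illustrative only
// (it uses a plain std::vector instead of the real buffer storage and assumes
// <cstdint> and <vector>); the sign lands in bit 0 of the encoded value, and
// bit 0 of every stored byte is the "more bytes follow" flag:
//
//   std::vector<uint8_t> bytes;
//   int32_t value = -300;
//   uint32_t bits = (static_cast<uint32_t>(value < 0 ? -value : value) << 1) |
//                   static_cast<uint32_t>(value < 0);
//   do {
//     uint32_t next = bits >> 7;
//     bytes.push_back(static_cast<uint8_t>(((bits << 1) & 0xFF) | (next != 0)));
//     bits = next;
//   } while (bits != 0);
//
//   uint32_t decoded = 0;
//   size_t index = 0;
//   for (int shift = 0; true; shift += 7) {
//     uint8_t b = bytes[index++];
//     decoded |= static_cast<uint32_t>(b >> 1) << shift;
//     if ((b & 1) == 0) break;
//   }
//   int32_t result = (decoded & 1) ? -static_cast<int32_t>(decoded >> 1)
//                                  : static_cast<int32_t>(decoded >> 1);
//   // result == -300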
2321
2322
2323Handle<ByteArray> TranslationBuffer::CreateByteArray(Factory* factory) {
2324  Handle<ByteArray> result = factory->NewByteArray(CurrentIndex(), TENURED);
2325  contents_.CopyTo(result->GetDataStartAddress());
2326  return result;
2327}
2328
2329
2330void Translation::BeginConstructStubFrame(int literal_id, unsigned height) {
2331  buffer_->Add(CONSTRUCT_STUB_FRAME);
2332  buffer_->Add(literal_id);
2333  buffer_->Add(height);
2334}
2335
2336
2337void Translation::BeginGetterStubFrame(int literal_id) {
2338  buffer_->Add(GETTER_STUB_FRAME);
2339  buffer_->Add(literal_id);
2340}
2341
2342
2343void Translation::BeginSetterStubFrame(int literal_id) {
2344  buffer_->Add(SETTER_STUB_FRAME);
2345  buffer_->Add(literal_id);
2346}
2347
2348
2349void Translation::BeginArgumentsAdaptorFrame(int literal_id, unsigned height) {
2350  buffer_->Add(ARGUMENTS_ADAPTOR_FRAME);
2351  buffer_->Add(literal_id);
2352  buffer_->Add(height);
2353}
2354
2355void Translation::BeginTailCallerFrame(int literal_id) {
2356  buffer_->Add(TAIL_CALLER_FRAME);
2357  buffer_->Add(literal_id);
2358}
2359
2360void Translation::BeginJSFrame(BailoutId node_id,
2361                               int literal_id,
2362                               unsigned height) {
2363  buffer_->Add(JS_FRAME);
2364  buffer_->Add(node_id.ToInt());
2365  buffer_->Add(literal_id);
2366  buffer_->Add(height);
2367}
2368
2369
2370void Translation::BeginInterpretedFrame(BailoutId bytecode_offset,
2371                                        int literal_id, unsigned height) {
2372  buffer_->Add(INTERPRETED_FRAME);
2373  buffer_->Add(bytecode_offset.ToInt());
2374  buffer_->Add(literal_id);
2375  buffer_->Add(height);
2376}
2377
2378
2379void Translation::BeginCompiledStubFrame(int height) {
2380  buffer_->Add(COMPILED_STUB_FRAME);
2381  buffer_->Add(height);
2382}
2383
2384
2385void Translation::BeginArgumentsObject(int args_length) {
2386  buffer_->Add(ARGUMENTS_OBJECT);
2387  buffer_->Add(args_length);
2388}
2389
2390
2391void Translation::BeginCapturedObject(int length) {
2392  buffer_->Add(CAPTURED_OBJECT);
2393  buffer_->Add(length);
2394}
2395
2396
2397void Translation::DuplicateObject(int object_index) {
2398  buffer_->Add(DUPLICATED_OBJECT);
2399  buffer_->Add(object_index);
2400}
2401
2402
2403void Translation::StoreRegister(Register reg) {
2404  buffer_->Add(REGISTER);
2405  buffer_->Add(reg.code());
2406}
2407
2408
2409void Translation::StoreInt32Register(Register reg) {
2410  buffer_->Add(INT32_REGISTER);
2411  buffer_->Add(reg.code());
2412}
2413
2414
2415void Translation::StoreUint32Register(Register reg) {
2416  buffer_->Add(UINT32_REGISTER);
2417  buffer_->Add(reg.code());
2418}
2419
2420
2421void Translation::StoreBoolRegister(Register reg) {
2422  buffer_->Add(BOOL_REGISTER);
2423  buffer_->Add(reg.code());
2424}
2425
2426void Translation::StoreFloatRegister(FloatRegister reg) {
2427  buffer_->Add(FLOAT_REGISTER);
2428  buffer_->Add(reg.code());
2429}
2430
2431void Translation::StoreDoubleRegister(DoubleRegister reg) {
2432  buffer_->Add(DOUBLE_REGISTER);
2433  buffer_->Add(reg.code());
2434}
2435
2436
2437void Translation::StoreStackSlot(int index) {
2438  buffer_->Add(STACK_SLOT);
2439  buffer_->Add(index);
2440}
2441
2442
2443void Translation::StoreInt32StackSlot(int index) {
2444  buffer_->Add(INT32_STACK_SLOT);
2445  buffer_->Add(index);
2446}
2447
2448
2449void Translation::StoreUint32StackSlot(int index) {
2450  buffer_->Add(UINT32_STACK_SLOT);
2451  buffer_->Add(index);
2452}
2453
2454
2455void Translation::StoreBoolStackSlot(int index) {
2456  buffer_->Add(BOOL_STACK_SLOT);
2457  buffer_->Add(index);
2458}
2459
2460void Translation::StoreFloatStackSlot(int index) {
2461  buffer_->Add(FLOAT_STACK_SLOT);
2462  buffer_->Add(index);
2463}
2464
2465void Translation::StoreDoubleStackSlot(int index) {
2466  buffer_->Add(DOUBLE_STACK_SLOT);
2467  buffer_->Add(index);
2468}
2469
2470
2471void Translation::StoreLiteral(int literal_id) {
2472  buffer_->Add(LITERAL);
2473  buffer_->Add(literal_id);
2474}
2475
2476
2477void Translation::StoreArgumentsObject(bool args_known,
2478                                       int args_index,
2479                                       int args_length) {
2480  buffer_->Add(ARGUMENTS_OBJECT);
2481  buffer_->Add(args_known);
2482  buffer_->Add(args_index);
2483  buffer_->Add(args_length);
2484}
2485
2486
2487void Translation::StoreJSFrameFunction() {
2488  StoreStackSlot((StandardFrameConstants::kCallerPCOffset -
2489                  StandardFrameConstants::kFunctionOffset) /
2490                 kPointerSize);
2491}
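
// A note on the arithmetic above: the recorded value is simply the distance,
// in words, between the caller's PC slot and the function slot of a standard
// JavaScript frame. For example, under an assumed layout where the caller's
// PC sits at fp + kPointerSize and the function at fp - 2 * kPointerSize, the
// stored stack-slot index is (1 - (-2)) = 3; the exact frame constants vary
// with the architecture and the embedded-constant-pool configuration.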
2492
2493int Translation::NumberOfOperandsFor(Opcode opcode) {
2494  switch (opcode) {
2495    case GETTER_STUB_FRAME:
2496    case SETTER_STUB_FRAME:
2497    case DUPLICATED_OBJECT:
2498    case ARGUMENTS_OBJECT:
2499    case CAPTURED_OBJECT:
2500    case REGISTER:
2501    case INT32_REGISTER:
2502    case UINT32_REGISTER:
2503    case BOOL_REGISTER:
2504    case FLOAT_REGISTER:
2505    case DOUBLE_REGISTER:
2506    case STACK_SLOT:
2507    case INT32_STACK_SLOT:
2508    case UINT32_STACK_SLOT:
2509    case BOOL_STACK_SLOT:
2510    case FLOAT_STACK_SLOT:
2511    case DOUBLE_STACK_SLOT:
2512    case LITERAL:
2513    case COMPILED_STUB_FRAME:
2514    case TAIL_CALLER_FRAME:
2515      return 1;
2516    case BEGIN:
2517    case ARGUMENTS_ADAPTOR_FRAME:
2518    case CONSTRUCT_STUB_FRAME:
2519      return 2;
2520    case JS_FRAME:
2521    case INTERPRETED_FRAME:
2522      return 3;
2523  }
2524  FATAL("Unexpected translation type");
2525  return -1;
2526}
2527
2528
2529#if defined(OBJECT_PRINT) || defined(ENABLE_DISASSEMBLER)
2530
2531const char* Translation::StringFor(Opcode opcode) {
2532#define TRANSLATION_OPCODE_CASE(item)   case item: return #item;
2533  switch (opcode) {
2534    TRANSLATION_OPCODE_LIST(TRANSLATION_OPCODE_CASE)
2535  }
2536#undef TRANSLATION_OPCODE_CASE
2537  UNREACHABLE();
2538  return "";
2539}
2540
2541#endif
2542
2543
2544Handle<FixedArray> MaterializedObjectStore::Get(Address fp) {
2545  int index = StackIdToIndex(fp);
2546  if (index == -1) {
2547    return Handle<FixedArray>::null();
2548  }
2549  Handle<FixedArray> array = GetStackEntries();
2550  CHECK_GT(array->length(), index);
2551  return Handle<FixedArray>::cast(Handle<Object>(array->get(index), isolate()));
2552}
2553
2554
2555void MaterializedObjectStore::Set(Address fp,
2556                                  Handle<FixedArray> materialized_objects) {
2557  int index = StackIdToIndex(fp);
2558  if (index == -1) {
2559    index = frame_fps_.length();
2560    frame_fps_.Add(fp);
2561  }
2562
2563  Handle<FixedArray> array = EnsureStackEntries(index + 1);
2564  array->set(index, *materialized_objects);
2565}
2566
2567
2568bool MaterializedObjectStore::Remove(Address fp) {
2569  int index = StackIdToIndex(fp);
2570  if (index == -1) {
2571    return false;
2572  }
2573  CHECK_GE(index, 0);
2574
2575  frame_fps_.Remove(index);
2576  FixedArray* array = isolate()->heap()->materialized_objects();
2577  CHECK_LT(index, array->length());
2578  for (int i = index; i < frame_fps_.length(); i++) {
2579    array->set(i, array->get(i + 1));
2580  }
2581  array->set(frame_fps_.length(), isolate()->heap()->undefined_value());
2582  return true;
2583}
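
// Illustrative removal (frame pointers assumed): if frame_fps_ held
// {fp0, fp1, fp2} and Remove(fp1) is called, index 1 is dropped from
// frame_fps_ (leaving {fp0, fp2}), the materialized_objects entries are
// shifted left starting at slot 1, and the now-unused trailing slot is reset
// to undefined so it no longer keeps its previous contents alive.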
2584
2585
2586int MaterializedObjectStore::StackIdToIndex(Address fp) {
2587  for (int i = 0; i < frame_fps_.length(); i++) {
2588    if (frame_fps_[i] == fp) {
2589      return i;
2590    }
2591  }
2592  return -1;
2593}
2594
2595
2596Handle<FixedArray> MaterializedObjectStore::GetStackEntries() {
2597  return Handle<FixedArray>(isolate()->heap()->materialized_objects());
2598}
2599
2600
2601Handle<FixedArray> MaterializedObjectStore::EnsureStackEntries(int length) {
2602  Handle<FixedArray> array = GetStackEntries();
2603  if (array->length() >= length) {
2604    return array;
2605  }
2606
2607  int new_length = length > 10 ? length : 10;
2608  if (new_length < 2 * array->length()) {
2609    new_length = 2 * array->length();
2610  }
2611
2612  Handle<FixedArray> new_array =
2613      isolate()->factory()->NewFixedArray(new_length, TENURED);
2614  for (int i = 0; i < array->length(); i++) {
2615    new_array->set(i, array->get(i));
2616  }
2617  for (int i = array->length(); i < length; i++) {
2618    new_array->set(i, isolate()->heap()->undefined_value());
2619  }
2620  isolate()->heap()->SetRootMaterializedObjects(*new_array);
2621  return new_array;
2622}
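
// Illustrative growth (lengths assumed): starting from an empty backing
// array, EnsureStackEntries(1) allocates 10 slots (the minimum), and a later
// EnsureStackEntries(12) grows the array to 20 slots because doubling the old
// length wins over the requested 12; slots between the old length and the
// requested length are explicitly initialized to undefined before the new
// array is installed as the materialized-objects root.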
2623
2624namespace {
2625
2626Handle<Object> GetValueForDebugger(TranslatedFrame::iterator it,
2627                                   Isolate* isolate) {
2628  if (it->GetRawValue() == isolate->heap()->arguments_marker()) {
2629    if (!it->IsMaterializableByDebugger()) {
2630      return isolate->factory()->undefined_value();
2631    }
2632  }
2633  return it->GetValue();
2634}
2635
2636}  // namespace
2637
2638DeoptimizedFrameInfo::DeoptimizedFrameInfo(TranslatedState* state,
2639                                           TranslatedState::iterator frame_it,
2640                                           Isolate* isolate) {
2641  // If the previous frame is an adaptor frame, we will take the parameters
2642  // from there.
2643  TranslatedState::iterator parameter_frame = frame_it;
2644  if (parameter_frame != state->begin()) {
2645    parameter_frame--;
2646  }
2647  int parameter_count;
2648  if (parameter_frame->kind() == TranslatedFrame::kArgumentsAdaptor) {
2649    parameter_count = parameter_frame->height() - 1;  // Ignore the receiver.
2650  } else {
2651    parameter_frame = frame_it;
2652    parameter_count =
2653        frame_it->shared_info()->internal_formal_parameter_count();
2654  }
2655  TranslatedFrame::iterator parameter_it = parameter_frame->begin();
2656  parameter_it++;  // Skip the function.
2657  parameter_it++;  // Skip the receiver.
2658
2659  // Figure out whether there is a construct stub frame on top of
2660  // the parameter frame.
2661  has_construct_stub_ =
2662      parameter_frame != state->begin() &&
2663      (parameter_frame - 1)->kind() == TranslatedFrame::kConstructStub;
2664
2665  if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
2666    source_position_ = Deoptimizer::ComputeSourcePositionFromBytecodeArray(
2667        *frame_it->shared_info(), frame_it->node_id());
2668  } else {
2669    DCHECK_EQ(TranslatedFrame::kFunction, frame_it->kind());
2670    source_position_ = Deoptimizer::ComputeSourcePositionFromBaselineCode(
2671        *frame_it->shared_info(), frame_it->node_id());
2672  }
2673
2674  TranslatedFrame::iterator value_it = frame_it->begin();
2675  // Get the function. Note that this might materialize the function.
2676  // In case the debugger mutates this value, we should deoptimize
2677  // the function and remember the value in the materialized value store.
2678  function_ = Handle<JSFunction>::cast(value_it->GetValue());
2679
2680  parameters_.resize(static_cast<size_t>(parameter_count));
2681  for (int i = 0; i < parameter_count; i++) {
2682    Handle<Object> parameter = GetValueForDebugger(parameter_it, isolate);
2683    SetParameter(i, parameter);
2684    parameter_it++;
2685  }
2686
2687  // Skip the function, the receiver and the arguments.
2688  int skip_count =
2689      frame_it->shared_info()->internal_formal_parameter_count() + 2;
2690  TranslatedFrame::iterator stack_it = frame_it->begin();
2691  for (int i = 0; i < skip_count; i++) {
2692    stack_it++;
2693  }
2694
2695  // Get the context.
2696  context_ = GetValueForDebugger(stack_it, isolate);
2697  stack_it++;
2698
2699  // Get the expression stack.
2700  int stack_height = frame_it->height();
2701  if (frame_it->kind() == TranslatedFrame::kFunction ||
2702      frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
2703    // For full-code frames, we should not count the context.
2704    // For interpreter frames, we should not count the accumulator.
2705    // TODO(jarin): Clean up the indexing in translated frames.
2706    stack_height--;
2707  }
2708  expression_stack_.resize(static_cast<size_t>(stack_height));
2709  for (int i = 0; i < stack_height; i++) {
2710    Handle<Object> expression = GetValueForDebugger(stack_it, isolate);
2711    SetExpression(i, expression);
2712    stack_it++;
2713  }
2714
2715  // For interpreter frames, skip the accumulator.
2716  if (frame_it->kind() == TranslatedFrame::kInterpretedFunction) {
2717    stack_it++;
2718  }
2719  CHECK(stack_it == frame_it->end());
2720}
2721
2722
2723Deoptimizer::DeoptInfo Deoptimizer::GetDeoptInfo(Code* code, Address pc) {
2724  SourcePosition last_position = SourcePosition::Unknown();
2725  DeoptimizeReason last_reason = DeoptimizeReason::kNoReason;
2726  int last_deopt_id = kNoDeoptimizationId;
2727  int mask = RelocInfo::ModeMask(RelocInfo::DEOPT_REASON) |
2728             RelocInfo::ModeMask(RelocInfo::DEOPT_ID) |
2729             RelocInfo::ModeMask(RelocInfo::DEOPT_SCRIPT_OFFSET) |
2730             RelocInfo::ModeMask(RelocInfo::DEOPT_INLINING_ID);
2731  for (RelocIterator it(code, mask); !it.done(); it.next()) {
2732    RelocInfo* info = it.rinfo();
2733    if (info->pc() >= pc) {
2734      return DeoptInfo(last_position, last_reason, last_deopt_id);
2735    }
2736    if (info->rmode() == RelocInfo::DEOPT_SCRIPT_OFFSET) {
2737      int script_offset = static_cast<int>(info->data());
2738      it.next();
2739      DCHECK(it.rinfo()->rmode() == RelocInfo::DEOPT_INLINING_ID);
2740      int inlining_id = static_cast<int>(it.rinfo()->data());
2741      last_position = SourcePosition(script_offset, inlining_id);
2742    } else if (info->rmode() == RelocInfo::DEOPT_ID) {
2743      last_deopt_id = static_cast<int>(info->data());
2744    } else if (info->rmode() == RelocInfo::DEOPT_REASON) {
2745      last_reason = static_cast<DeoptimizeReason>(info->data());
2746    }
2747  }
2748  return DeoptInfo(SourcePosition::Unknown(), DeoptimizeReason::kNoReason, -1);
2749}
2750
2751
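// Computes the source position for a bailout in full-codegen (baseline) code:
// the bailout id is looked up in the code's deoptimization output data and the
// decoded code offset is mapped back to a source position.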
2752// static
2753int Deoptimizer::ComputeSourcePositionFromBaselineCode(
2754    SharedFunctionInfo* shared, BailoutId node_id) {
2755  DCHECK(shared->HasBaselineCode());
2756  Code* code = shared->code();
2757  FixedArray* raw_data = code->deoptimization_data();
2758  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
2759  unsigned pc_and_state = Deoptimizer::GetOutputInfo(data, node_id, shared);
2760  int code_offset =
2761      static_cast<int>(FullCodeGenerator::PcField::decode(pc_and_state));
2762  return AbstractCode::cast(code)->SourcePosition(code_offset);
2763}
2764
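// Computes the source position for a bailout in interpreted code: here the
// bailout id is simply the bytecode offset.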
2765// static
2766int Deoptimizer::ComputeSourcePositionFromBytecodeArray(
2767    SharedFunctionInfo* shared, BailoutId node_id) {
2768  DCHECK(shared->HasBytecodeArray());
2769  return AbstractCode::cast(shared->bytecode_array())
2770      ->SourcePosition(node_id.ToInt());
2771}
2772
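// Factory methods for the individual TranslatedValue kinds. Values that need
// heap materialization (arguments objects, captured objects and duplicates of
// them) only record an object index and length here; the actual allocation
// happens later in TranslatedState.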
2773// static
2774TranslatedValue TranslatedValue::NewArgumentsObject(TranslatedState* container,
2775                                                    int length,
2776                                                    int object_index) {
2777  TranslatedValue slot(container, kArgumentsObject);
2778  slot.materialization_info_ = {object_index, length};
2779  return slot;
2780}
2781
2782
2783// static
2784TranslatedValue TranslatedValue::NewDeferredObject(TranslatedState* container,
2785                                                   int length,
2786                                                   int object_index) {
2787  TranslatedValue slot(container, kCapturedObject);
2788  slot.materialization_info_ = {object_index, length};
2789  return slot;
2790}
2791
2792
2793// static
2794TranslatedValue TranslatedValue::NewDuplicateObject(TranslatedState* container,
2795                                                    int id) {
2796  TranslatedValue slot(container, kDuplicatedObject);
2797  slot.materialization_info_ = {id, -1};
2798  return slot;
2799}
2800
2801
2802// static
2803TranslatedValue TranslatedValue::NewFloat(TranslatedState* container,
2804                                          float value) {
2805  TranslatedValue slot(container, kFloat);
2806  slot.float_value_ = value;
2807  return slot;
2808}
2809
2810// static
2811TranslatedValue TranslatedValue::NewDouble(TranslatedState* container,
2812                                           double value) {
2813  TranslatedValue slot(container, kDouble);
2814  slot.double_value_ = value;
2815  return slot;
2816}
2817
2818
2819// static
2820TranslatedValue TranslatedValue::NewInt32(TranslatedState* container,
2821                                          int32_t value) {
2822  TranslatedValue slot(container, kInt32);
2823  slot.int32_value_ = value;
2824  return slot;
2825}
2826
2827
2828// static
2829TranslatedValue TranslatedValue::NewUInt32(TranslatedState* container,
2830                                           uint32_t value) {
2831  TranslatedValue slot(container, kUInt32);
2832  slot.uint32_value_ = value;
2833  return slot;
2834}
2835
2836
2837// static
2838TranslatedValue TranslatedValue::NewBool(TranslatedState* container,
2839                                         uint32_t value) {
2840  TranslatedValue slot(container, kBoolBit);
2841  slot.uint32_value_ = value;
2842  return slot;
2843}
2844
2845
2846// static
2847TranslatedValue TranslatedValue::NewTagged(TranslatedState* container,
2848                                           Object* literal) {
2849  TranslatedValue slot(container, kTagged);
2850  slot.raw_literal_ = literal;
2851  return slot;
2852}
2853
2854
2855// static
2856TranslatedValue TranslatedValue::NewInvalid(TranslatedState* container) {
2857  return TranslatedValue(container, kInvalid);
2858}
2859
2860
2861Isolate* TranslatedValue::isolate() const { return container_->isolate(); }
2862
2863
2864Object* TranslatedValue::raw_literal() const {
2865  DCHECK_EQ(kTagged, kind());
2866  return raw_literal_;
2867}
2868
2869
2870int32_t TranslatedValue::int32_value() const {
2871  DCHECK_EQ(kInt32, kind());
2872  return int32_value_;
2873}
2874
2875
2876uint32_t TranslatedValue::uint32_value() const {
2877  DCHECK(kind() == kUInt32 || kind() == kBoolBit);
2878  return uint32_value_;
2879}
2880
2881float TranslatedValue::float_value() const {
2882  DCHECK_EQ(kFloat, kind());
2883  return float_value_;
2884}
2885
2886double TranslatedValue::double_value() const {
2887  DCHECK_EQ(kDouble, kind());
2888  return double_value_;
2889}
2890
2891
2892int TranslatedValue::object_length() const {
2893  DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject);
2894  return materialization_info_.length_;
2895}
2896
2897
2898int TranslatedValue::object_index() const {
2899  DCHECK(kind() == kArgumentsObject || kind() == kCapturedObject ||
2900         kind() == kDuplicatedObject);
2901  return materialization_info_.id_;
2902}
2903
2904
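// Returns the value as a raw Object* if that is possible without allocating
// on the heap (tagged literals, Smi-representable integers and boolean bits);
// otherwise returns the arguments marker sentinel.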
2905Object* TranslatedValue::GetRawValue() const {
2906  // If we have a value, return it.
2907  Handle<Object> result_handle;
2908  if (value_.ToHandle(&result_handle)) {
2909    return *result_handle;
2910  }
2911
  // Otherwise, make a best-effort attempt to get the value without
  // allocation.
2913  switch (kind()) {
2914    case kTagged:
2915      return raw_literal();
2916
2917    case kInt32: {
2918      bool is_smi = Smi::IsValid(int32_value());
2919      if (is_smi) {
2920        return Smi::FromInt(int32_value());
2921      }
2922      break;
2923    }
2924
2925    case kUInt32: {
2926      bool is_smi = (uint32_value() <= static_cast<uintptr_t>(Smi::kMaxValue));
2927      if (is_smi) {
2928        return Smi::FromInt(static_cast<int32_t>(uint32_value()));
2929      }
2930      break;
2931    }
2932
2933    case kBoolBit: {
2934      if (uint32_value() == 0) {
2935        return isolate()->heap()->false_value();
2936      } else {
2937        CHECK_EQ(1U, uint32_value());
2938        return isolate()->heap()->true_value();
2939      }
2940    }
2941
2942    default:
2943      break;
2944  }
2945
2946  // If we could not get the value without allocation, return the arguments
2947  // marker.
2948  return isolate()->heap()->arguments_marker();
2949}
2950
2951
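// Returns the value as a handle, materializing it if necessary. Simple values
// are boxed via MaterializeSimple(); objects are materialized through the
// owning TranslatedState.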
2952Handle<Object> TranslatedValue::GetValue() {
2953  Handle<Object> result;
2954  // If we already have a value, then get it.
2955  if (value_.ToHandle(&result)) return result;
2956
2957  // Otherwise we have to materialize.
2958  switch (kind()) {
2959    case TranslatedValue::kTagged:
2960    case TranslatedValue::kInt32:
2961    case TranslatedValue::kUInt32:
2962    case TranslatedValue::kBoolBit:
2963    case TranslatedValue::kFloat:
2964    case TranslatedValue::kDouble: {
2965      MaterializeSimple();
2966      return value_.ToHandleChecked();
2967    }
2968
2969    case TranslatedValue::kArgumentsObject:
2970    case TranslatedValue::kCapturedObject:
2971    case TranslatedValue::kDuplicatedObject:
2972      return container_->MaterializeObjectAt(object_index());
2973
2974    case TranslatedValue::kInvalid:
2975      FATAL("unexpected case");
2976      return Handle<Object>::null();
2977  }
2978
2979  FATAL("internal error: value missing");
2980  return Handle<Object>::null();
2981}
2982
2983
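// Turns a simple (non-object) value into a handle, allocating a heap number
// if the value cannot be represented without allocation.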
2984void TranslatedValue::MaterializeSimple() {
  // If the value has already been materialized, there is nothing to do.
2986  if (!value_.is_null()) return;
2987
2988  Object* raw_value = GetRawValue();
2989  if (raw_value != isolate()->heap()->arguments_marker()) {
2990    // We can get the value without allocation, just return it here.
2991    value_ = Handle<Object>(raw_value, isolate());
2992    return;
2993  }
2994
2995  switch (kind()) {
2996    case kInt32: {
2997      value_ = Handle<Object>(isolate()->factory()->NewNumber(int32_value()));
2998      return;
2999    }
3000
3001    case kUInt32:
3002      value_ = Handle<Object>(isolate()->factory()->NewNumber(uint32_value()));
3003      return;
3004
3005    case kFloat:
3006      value_ = Handle<Object>(isolate()->factory()->NewNumber(float_value()));
3007      return;
3008
3009    case kDouble:
3010      value_ = Handle<Object>(isolate()->factory()->NewNumber(double_value()));
3011      return;
3012
3013    case kCapturedObject:
3014    case kDuplicatedObject:
3015    case kArgumentsObject:
3016    case kInvalid:
3017    case kTagged:
3018    case kBoolBit:
3019      FATAL("internal error: unexpected materialization.");
3020      break;
3021  }
3022}
3023
3024
3025bool TranslatedValue::IsMaterializedObject() const {
3026  switch (kind()) {
3027    case kCapturedObject:
3028    case kDuplicatedObject:
3029    case kArgumentsObject:
3030      return true;
3031    default:
3032      return false;
3033  }
3034}
3035
3036bool TranslatedValue::IsMaterializableByDebugger() const {
3037  // At the moment, we only allow materialization of doubles.
3038  return (kind() == kDouble);
3039}
3040
3041int TranslatedValue::GetChildrenCount() const {
3042  if (kind() == kCapturedObject || kind() == kArgumentsObject) {
3043    return object_length();
3044  } else {
3045    return 0;
3046  }
3047}
3048
3049
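// Reads the 32-bit payload of the stack slot at {fp} + {slot_offset}. On
// 64-bit big-endian targets the payload lives in the higher-addressed half of
// the pointer-sized slot.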
3050uint32_t TranslatedState::GetUInt32Slot(Address fp, int slot_offset) {
3051  Address address = fp + slot_offset;
3052#if V8_TARGET_BIG_ENDIAN && V8_HOST_ARCH_64_BIT
3053  return Memory::uint32_at(address + kIntSize);
3054#else
3055  return Memory::uint32_at(address);
3056#endif
3057}
3058
3059
3060void TranslatedValue::Handlify() {
3061  if (kind() == kTagged) {
3062    value_ = Handle<Object>(raw_literal(), isolate());
3063    raw_literal_ = nullptr;
3064  }
3065}
3066
3067
3068TranslatedFrame TranslatedFrame::JSFrame(BailoutId node_id,
3069                                         SharedFunctionInfo* shared_info,
3070                                         int height) {
3071  TranslatedFrame frame(kFunction, shared_info->GetIsolate(), shared_info,
3072                        height);
3073  frame.node_id_ = node_id;
3074  return frame;
3075}
3076
3077
3078TranslatedFrame TranslatedFrame::InterpretedFrame(
3079    BailoutId bytecode_offset, SharedFunctionInfo* shared_info, int height) {
3080  TranslatedFrame frame(kInterpretedFunction, shared_info->GetIsolate(),
3081                        shared_info, height);
3082  frame.node_id_ = bytecode_offset;
3083  return frame;
3084}
3085
3086
3087TranslatedFrame TranslatedFrame::AccessorFrame(
3088    Kind kind, SharedFunctionInfo* shared_info) {
3089  DCHECK(kind == kSetter || kind == kGetter);
3090  return TranslatedFrame(kind, shared_info->GetIsolate(), shared_info);
3091}
3092
3093
3094TranslatedFrame TranslatedFrame::ArgumentsAdaptorFrame(
3095    SharedFunctionInfo* shared_info, int height) {
3096  return TranslatedFrame(kArgumentsAdaptor, shared_info->GetIsolate(),
3097                         shared_info, height);
3098}
3099
3100TranslatedFrame TranslatedFrame::TailCallerFrame(
3101    SharedFunctionInfo* shared_info) {
3102  return TranslatedFrame(kTailCallerFunction, shared_info->GetIsolate(),
3103                         shared_info, 0);
3104}
3105
3106TranslatedFrame TranslatedFrame::ConstructStubFrame(
3107    SharedFunctionInfo* shared_info, int height) {
3108  return TranslatedFrame(kConstructStub, shared_info->GetIsolate(), shared_info,
3109                         height);
3110}
3111
3112
3113int TranslatedFrame::GetValueCount() {
3114  switch (kind()) {
3115    case kFunction: {
      // Note: internal_formal_parameter_count() does not include the
      // receiver, so add one for it here.
      int parameter_count =
          raw_shared_info_->internal_formal_parameter_count() + 1;
      // + 1 for the function.
      return height_ + parameter_count + 1;
3120    }
3121
3122    case kInterpretedFunction: {
      // Note: internal_formal_parameter_count() does not include the
      // receiver, so add one for it here.
      int parameter_count =
          raw_shared_info_->internal_formal_parameter_count() + 1;
      // + 2 for the function and the context.
      return height_ + parameter_count + 2;
3127    }
3128
3129    case kGetter:
3130      return 2;  // Function and receiver.
3131
3132    case kSetter:
3133      return 3;  // Function, receiver and the value to set.
3134
3135    case kArgumentsAdaptor:
3136    case kConstructStub:
3137      return 1 + height_;
3138
3139    case kTailCallerFunction:
3140      return 1;  // Function.
3141
3142    case kCompiledStub:
3143      return height_;
3144
3145    case kInvalid:
3146      UNREACHABLE();
3147      break;
3148  }
3149  UNREACHABLE();
3150  return -1;
3151}
3152
3153
3154void TranslatedFrame::Handlify() {
3155  if (raw_shared_info_ != nullptr) {
3156    shared_info_ = Handle<SharedFunctionInfo>(raw_shared_info_);
3157    raw_shared_info_ = nullptr;
3158  }
3159  for (auto& value : values_) {
3160    value.Handlify();
3161  }
3162}
3163
3164
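// Reads a single frame descriptor from the translation stream and creates the
// corresponding TranslatedFrame, optionally tracing it to {trace_file}. Value
// opcodes are handled separately in CreateNextTranslatedValue.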
3165TranslatedFrame TranslatedState::CreateNextTranslatedFrame(
3166    TranslationIterator* iterator, FixedArray* literal_array, Address fp,
3167    FILE* trace_file) {
3168  Translation::Opcode opcode =
3169      static_cast<Translation::Opcode>(iterator->Next());
3170  switch (opcode) {
3171    case Translation::JS_FRAME: {
3172      BailoutId node_id = BailoutId(iterator->Next());
3173      SharedFunctionInfo* shared_info =
3174          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3175      int height = iterator->Next();
3176      if (trace_file != nullptr) {
3177        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
3178        PrintF(trace_file, "  reading input frame %s", name.get());
3179        int arg_count = shared_info->internal_formal_parameter_count() + 1;
3180        PrintF(trace_file, " => node=%d, args=%d, height=%d; inputs:\n",
3181               node_id.ToInt(), arg_count, height);
3182      }
3183      return TranslatedFrame::JSFrame(node_id, shared_info, height);
3184    }
3185
3186    case Translation::INTERPRETED_FRAME: {
3187      BailoutId bytecode_offset = BailoutId(iterator->Next());
3188      SharedFunctionInfo* shared_info =
3189          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3190      int height = iterator->Next();
3191      if (trace_file != nullptr) {
3192        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
3193        PrintF(trace_file, "  reading input frame %s", name.get());
3194        int arg_count = shared_info->internal_formal_parameter_count() + 1;
3195        PrintF(trace_file,
3196               " => bytecode_offset=%d, args=%d, height=%d; inputs:\n",
3197               bytecode_offset.ToInt(), arg_count, height);
3198      }
3199      return TranslatedFrame::InterpretedFrame(bytecode_offset, shared_info,
3200                                               height);
3201    }
3202
3203    case Translation::ARGUMENTS_ADAPTOR_FRAME: {
3204      SharedFunctionInfo* shared_info =
3205          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3206      int height = iterator->Next();
3207      if (trace_file != nullptr) {
3208        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
3209        PrintF(trace_file, "  reading arguments adaptor frame %s", name.get());
3210        PrintF(trace_file, " => height=%d; inputs:\n", height);
3211      }
3212      return TranslatedFrame::ArgumentsAdaptorFrame(shared_info, height);
3213    }
3214
3215    case Translation::TAIL_CALLER_FRAME: {
3216      SharedFunctionInfo* shared_info =
3217          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3218      if (trace_file != nullptr) {
3219        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
3220        PrintF(trace_file, "  reading tail caller frame marker %s\n",
3221               name.get());
3222      }
3223      return TranslatedFrame::TailCallerFrame(shared_info);
3224    }
3225
3226    case Translation::CONSTRUCT_STUB_FRAME: {
3227      SharedFunctionInfo* shared_info =
3228          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3229      int height = iterator->Next();
3230      if (trace_file != nullptr) {
3231        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
3232        PrintF(trace_file, "  reading construct stub frame %s", name.get());
3233        PrintF(trace_file, " => height=%d; inputs:\n", height);
3234      }
3235      return TranslatedFrame::ConstructStubFrame(shared_info, height);
3236    }
3237
3238    case Translation::GETTER_STUB_FRAME: {
3239      SharedFunctionInfo* shared_info =
3240          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3241      if (trace_file != nullptr) {
3242        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
3243        PrintF(trace_file, "  reading getter frame %s; inputs:\n", name.get());
3244      }
3245      return TranslatedFrame::AccessorFrame(TranslatedFrame::kGetter,
3246                                            shared_info);
3247    }
3248
3249    case Translation::SETTER_STUB_FRAME: {
3250      SharedFunctionInfo* shared_info =
3251          SharedFunctionInfo::cast(literal_array->get(iterator->Next()));
3252      if (trace_file != nullptr) {
3253        std::unique_ptr<char[]> name = shared_info->DebugName()->ToCString();
3254        PrintF(trace_file, "  reading setter frame %s; inputs:\n", name.get());
3255      }
3256      return TranslatedFrame::AccessorFrame(TranslatedFrame::kSetter,
3257                                            shared_info);
3258    }
3259
3260    case Translation::COMPILED_STUB_FRAME: {
3261      int height = iterator->Next();
3262      if (trace_file != nullptr) {
3263        PrintF(trace_file,
3264               "  reading compiler stub frame => height=%d; inputs:\n", height);
3265      }
3266      return TranslatedFrame::CompiledStubFrame(height,
3267                                                literal_array->GetIsolate());
3268    }
3269
3270    case Translation::BEGIN:
3271    case Translation::DUPLICATED_OBJECT:
3272    case Translation::ARGUMENTS_OBJECT:
3273    case Translation::CAPTURED_OBJECT:
3274    case Translation::REGISTER:
3275    case Translation::INT32_REGISTER:
3276    case Translation::UINT32_REGISTER:
3277    case Translation::BOOL_REGISTER:
3278    case Translation::FLOAT_REGISTER:
3279    case Translation::DOUBLE_REGISTER:
3280    case Translation::STACK_SLOT:
3281    case Translation::INT32_STACK_SLOT:
3282    case Translation::UINT32_STACK_SLOT:
3283    case Translation::BOOL_STACK_SLOT:
3284    case Translation::FLOAT_STACK_SLOT:
3285    case Translation::DOUBLE_STACK_SLOT:
3286    case Translation::LITERAL:
3287      break;
3288  }
3289  FATAL("We should never get here - unexpected deopt info.");
3290  return TranslatedFrame::InvalidFrame();
3291}
3292
3293
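// Advances {iter} over one logical value, including all of its nested
// children (captured and arguments objects carry their fields as children).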
3294// static
3295void TranslatedFrame::AdvanceIterator(
3296    std::deque<TranslatedValue>::iterator* iter) {
3297  int values_to_skip = 1;
3298  while (values_to_skip > 0) {
3299    // Consume the current element.
3300    values_to_skip--;
3301    // Add all the children.
3302    values_to_skip += (*iter)->GetChildrenCount();
3303
3304    (*iter)++;
3305  }
3306}
3307
3308
// We cannot intermix stack decoding and allocations because the
// deoptimization infrastructure is not GC safe.
// Thus we build a temporary structure in malloced space.
3312TranslatedValue TranslatedState::CreateNextTranslatedValue(
3313    int frame_index, int value_index, TranslationIterator* iterator,
3314    FixedArray* literal_array, Address fp, RegisterValues* registers,
3315    FILE* trace_file) {
3316  disasm::NameConverter converter;
3317
3318  Translation::Opcode opcode =
3319      static_cast<Translation::Opcode>(iterator->Next());
3320  switch (opcode) {
3321    case Translation::BEGIN:
3322    case Translation::JS_FRAME:
3323    case Translation::INTERPRETED_FRAME:
3324    case Translation::ARGUMENTS_ADAPTOR_FRAME:
3325    case Translation::TAIL_CALLER_FRAME:
3326    case Translation::CONSTRUCT_STUB_FRAME:
3327    case Translation::GETTER_STUB_FRAME:
3328    case Translation::SETTER_STUB_FRAME:
3329    case Translation::COMPILED_STUB_FRAME:
3330      // Peeled off before getting here.
3331      break;
3332
3333    case Translation::DUPLICATED_OBJECT: {
3334      int object_id = iterator->Next();
3335      if (trace_file != nullptr) {
3336        PrintF(trace_file, "duplicated object #%d", object_id);
3337      }
3338      object_positions_.push_back(object_positions_[object_id]);
3339      return TranslatedValue::NewDuplicateObject(this, object_id);
3340    }
3341
3342    case Translation::ARGUMENTS_OBJECT: {
3343      int arg_count = iterator->Next();
3344      int object_index = static_cast<int>(object_positions_.size());
3345      if (trace_file != nullptr) {
3346        PrintF(trace_file, "argumets object #%d (length = %d)", object_index,
3347               arg_count);
3348      }
3349      object_positions_.push_back({frame_index, value_index});
3350      return TranslatedValue::NewArgumentsObject(this, arg_count, object_index);
3351    }
3352
3353    case Translation::CAPTURED_OBJECT: {
3354      int field_count = iterator->Next();
3355      int object_index = static_cast<int>(object_positions_.size());
3356      if (trace_file != nullptr) {
3357        PrintF(trace_file, "captured object #%d (length = %d)", object_index,
3358               field_count);
3359      }
3360      object_positions_.push_back({frame_index, value_index});
3361      return TranslatedValue::NewDeferredObject(this, field_count,
3362                                                object_index);
3363    }
3364
3365    case Translation::REGISTER: {
3366      int input_reg = iterator->Next();
3367      if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3368      intptr_t value = registers->GetRegister(input_reg);
3369      if (trace_file != nullptr) {
3370        PrintF(trace_file, "0x%08" V8PRIxPTR " ; %s ", value,
3371               converter.NameOfCPURegister(input_reg));
3372        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3373      }
3374      return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
3375    }
3376
3377    case Translation::INT32_REGISTER: {
3378      int input_reg = iterator->Next();
3379      if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3380      intptr_t value = registers->GetRegister(input_reg);
3381      if (trace_file != nullptr) {
3382        PrintF(trace_file, "%" V8PRIdPTR " ; %s ", value,
3383               converter.NameOfCPURegister(input_reg));
3384      }
3385      return TranslatedValue::NewInt32(this, static_cast<int32_t>(value));
3386    }
3387
3388    case Translation::UINT32_REGISTER: {
3389      int input_reg = iterator->Next();
3390      if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3391      intptr_t value = registers->GetRegister(input_reg);
3392      if (trace_file != nullptr) {
3393        PrintF(trace_file, "%" V8PRIuPTR " ; %s (uint)", value,
3394               converter.NameOfCPURegister(input_reg));
3395        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3396      }
3397      return TranslatedValue::NewUInt32(this, static_cast<uint32_t>(value));
3398    }
3399
3400    case Translation::BOOL_REGISTER: {
3401      int input_reg = iterator->Next();
3402      if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3403      intptr_t value = registers->GetRegister(input_reg);
3404      if (trace_file != nullptr) {
3405        PrintF(trace_file, "%" V8PRIdPTR " ; %s (bool)", value,
3406               converter.NameOfCPURegister(input_reg));
3407      }
3408      return TranslatedValue::NewBool(this, static_cast<uint32_t>(value));
3409    }
3410
3411    case Translation::FLOAT_REGISTER: {
3412      int input_reg = iterator->Next();
3413      if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3414      float value = registers->GetFloatRegister(input_reg);
3415      if (trace_file != nullptr) {
3416        PrintF(trace_file, "%e ; %s (float)", value,
3417               RegisterConfiguration::Crankshaft()->GetFloatRegisterName(
3418                   input_reg));
3419      }
3420      return TranslatedValue::NewFloat(this, value);
3421    }
3422
3423    case Translation::DOUBLE_REGISTER: {
3424      int input_reg = iterator->Next();
3425      if (registers == nullptr) return TranslatedValue::NewInvalid(this);
3426      double value = registers->GetDoubleRegister(input_reg);
3427      if (trace_file != nullptr) {
3428        PrintF(trace_file, "%e ; %s (double)", value,
3429               RegisterConfiguration::Crankshaft()->GetDoubleRegisterName(
3430                   input_reg));
3431      }
3432      return TranslatedValue::NewDouble(this, value);
3433    }
3434
3435    case Translation::STACK_SLOT: {
3436      int slot_offset =
3437          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3438      intptr_t value = *(reinterpret_cast<intptr_t*>(fp + slot_offset));
3439      if (trace_file != nullptr) {
3440        PrintF(trace_file, "0x%08" V8PRIxPTR " ; [fp %c %d] ", value,
3441               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3442        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3443      }
3444      return TranslatedValue::NewTagged(this, reinterpret_cast<Object*>(value));
3445    }
3446
3447    case Translation::INT32_STACK_SLOT: {
3448      int slot_offset =
3449          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3450      uint32_t value = GetUInt32Slot(fp, slot_offset);
3451      if (trace_file != nullptr) {
3452        PrintF(trace_file, "%d ; (int) [fp %c %d] ",
3453               static_cast<int32_t>(value), slot_offset < 0 ? '-' : '+',
3454               std::abs(slot_offset));
3455      }
3456      return TranslatedValue::NewInt32(this, value);
3457    }
3458
3459    case Translation::UINT32_STACK_SLOT: {
3460      int slot_offset =
3461          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3462      uint32_t value = GetUInt32Slot(fp, slot_offset);
3463      if (trace_file != nullptr) {
3464        PrintF(trace_file, "%u ; (uint) [fp %c %d] ", value,
3465               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3466      }
3467      return TranslatedValue::NewUInt32(this, value);
3468    }
3469
3470    case Translation::BOOL_STACK_SLOT: {
3471      int slot_offset =
3472          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3473      uint32_t value = GetUInt32Slot(fp, slot_offset);
3474      if (trace_file != nullptr) {
3475        PrintF(trace_file, "%u ; (bool) [fp %c %d] ", value,
3476               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3477      }
3478      return TranslatedValue::NewBool(this, value);
3479    }
3480
3481    case Translation::FLOAT_STACK_SLOT: {
3482      int slot_offset =
3483          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3484      float value = ReadFloatValue(fp + slot_offset);
3485      if (trace_file != nullptr) {
3486        PrintF(trace_file, "%e ; (float) [fp %c %d] ", value,
3487               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3488      }
3489      return TranslatedValue::NewFloat(this, value);
3490    }
3491
3492    case Translation::DOUBLE_STACK_SLOT: {
3493      int slot_offset =
3494          OptimizedFrame::StackSlotOffsetRelativeToFp(iterator->Next());
3495      double value = ReadDoubleValue(fp + slot_offset);
3496      if (trace_file != nullptr) {
3497        PrintF(trace_file, "%e ; (double) [fp %c %d] ", value,
3498               slot_offset < 0 ? '-' : '+', std::abs(slot_offset));
3499      }
3500      return TranslatedValue::NewDouble(this, value);
3501    }
3502
3503    case Translation::LITERAL: {
3504      int literal_index = iterator->Next();
3505      Object* value = literal_array->get(literal_index);
3506      if (trace_file != nullptr) {
3507        PrintF(trace_file, "0x%08" V8PRIxPTR " ; (literal %d) ",
3508               reinterpret_cast<intptr_t>(value), literal_index);
3509        reinterpret_cast<Object*>(value)->ShortPrint(trace_file);
3510      }
3511
3512      return TranslatedValue::NewTagged(this, value);
3513    }
3514  }
3515
3516  FATAL("We should never get here - unexpected deopt info.");
3517  return TranslatedValue(nullptr, TranslatedValue::kInvalid);
3518}
3519
3520
3521TranslatedState::TranslatedState(JavaScriptFrame* frame)
3522    : isolate_(nullptr),
3523      stack_frame_pointer_(nullptr),
3524      has_adapted_arguments_(false) {
3525  int deopt_index = Safepoint::kNoDeoptimizationIndex;
3526  DeoptimizationInputData* data =
3527      static_cast<OptimizedFrame*>(frame)->GetDeoptimizationData(&deopt_index);
3528  DCHECK(data != nullptr && deopt_index != Safepoint::kNoDeoptimizationIndex);
3529  TranslationIterator it(data->TranslationByteArray(),
3530                         data->TranslationIndex(deopt_index)->value());
3531  Init(frame->fp(), &it, data->LiteralArray(), nullptr /* registers */,
3532       nullptr /* trace file */);
3533}
3534
3535
3536TranslatedState::TranslatedState()
3537    : isolate_(nullptr),
3538      stack_frame_pointer_(nullptr),
3539      has_adapted_arguments_(false) {}
3540
3541
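// Reads a complete translation: the BEGIN opcode with the frame count, then
// for each frame a descriptor followed by its values. Captured objects make
// the stream hierarchical, so a stack of pending counts tracks how many
// nested values still need to be consumed. Roughly (illustrative only, not
// the exact encoding):
//   BEGIN <frame_count> <js_frame_count>
//   JS_FRAME <node_id> <shared_info> <height>
//     <function> <receiver> <parameters...> <context> <expression stack...>
//   ARGUMENTS_ADAPTOR_FRAME <shared_info> <height>
//     ...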
3542void TranslatedState::Init(Address input_frame_pointer,
3543                           TranslationIterator* iterator,
3544                           FixedArray* literal_array, RegisterValues* registers,
3545                           FILE* trace_file) {
3546  DCHECK(frames_.empty());
3547
3548  isolate_ = literal_array->GetIsolate();
3549  // Read out the 'header' translation.
3550  Translation::Opcode opcode =
3551      static_cast<Translation::Opcode>(iterator->Next());
3552  CHECK(opcode == Translation::BEGIN);
3553
3554  int count = iterator->Next();
3555  iterator->Next();  // Drop JS frames count.
3556
3557  frames_.reserve(count);
3558
3559  std::stack<int> nested_counts;
3560
3561  // Read the frames
3562  for (int i = 0; i < count; i++) {
3563    // Read the frame descriptor.
3564    frames_.push_back(CreateNextTranslatedFrame(
3565        iterator, literal_array, input_frame_pointer, trace_file));
3566    TranslatedFrame& frame = frames_.back();
3567
3568    // Read the values.
3569    int values_to_process = frame.GetValueCount();
3570    while (values_to_process > 0 || !nested_counts.empty()) {
3571      if (trace_file != nullptr) {
3572        if (nested_counts.empty()) {
3573          // For top level values, print the value number.
3574          PrintF(trace_file, "    %3i: ",
3575                 frame.GetValueCount() - values_to_process);
3576        } else {
3577          // Take care of indenting for nested values.
3578          PrintF(trace_file, "         ");
3579          for (size_t j = 0; j < nested_counts.size(); j++) {
3580            PrintF(trace_file, "  ");
3581          }
3582        }
3583      }
3584
3585      TranslatedValue value = CreateNextTranslatedValue(
3586          i, static_cast<int>(frame.values_.size()), iterator, literal_array,
3587          input_frame_pointer, registers, trace_file);
3588      frame.Add(value);
3589
3590      if (trace_file != nullptr) {
3591        PrintF(trace_file, "\n");
3592      }
3593
3594      // Update the value count and resolve the nesting.
3595      values_to_process--;
3596      int children_count = value.GetChildrenCount();
3597      if (children_count > 0) {
3598        nested_counts.push(values_to_process);
3599        values_to_process = children_count;
3600      } else {
3601        while (values_to_process == 0 && !nested_counts.empty()) {
3602          values_to_process = nested_counts.top();
3603          nested_counts.pop();
3604        }
3605      }
3606    }
3607  }
3608
3609  CHECK(!iterator->HasNext() ||
3610        static_cast<Translation::Opcode>(iterator->Next()) ==
3611            Translation::BEGIN);
3612}
3613
3614
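// Converts all raw pointers in the translated frames into handles (so the
// state survives GC) and injects any objects that were materialized earlier
// for this stack frame.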
3615void TranslatedState::Prepare(bool has_adapted_arguments,
3616                              Address stack_frame_pointer) {
3617  for (auto& frame : frames_) frame.Handlify();
3618
3619  stack_frame_pointer_ = stack_frame_pointer;
3620  has_adapted_arguments_ = has_adapted_arguments;
3621
3622  UpdateFromPreviouslyMaterializedObjects();
3623}
3624
3625
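// Materializes the value at {*value_index} within the given frame and
// advances {*value_index} past the value and all of its nested fields.
// Captured objects are allocated according to the instance type of their
// (materialized) map.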
3626Handle<Object> TranslatedState::MaterializeAt(int frame_index,
3627                                              int* value_index) {
3628  TranslatedFrame* frame = &(frames_[frame_index]);
3629  CHECK(static_cast<size_t>(*value_index) < frame->values_.size());
3630
3631  TranslatedValue* slot = &(frame->values_[*value_index]);
3632  (*value_index)++;
3633
3634  switch (slot->kind()) {
3635    case TranslatedValue::kTagged:
3636    case TranslatedValue::kInt32:
3637    case TranslatedValue::kUInt32:
3638    case TranslatedValue::kBoolBit:
3639    case TranslatedValue::kFloat:
3640    case TranslatedValue::kDouble: {
3641      slot->MaterializeSimple();
3642      Handle<Object> value = slot->GetValue();
3643      if (value->IsMutableHeapNumber()) {
3644        HeapNumber::cast(*value)->set_map(isolate()->heap()->heap_number_map());
3645      }
3646      return value;
3647    }
3648
3649    case TranslatedValue::kArgumentsObject: {
3650      int length = slot->GetChildrenCount();
3651      Handle<JSObject> arguments;
3652      if (GetAdaptedArguments(&arguments, frame_index)) {
3653        // Store the materialized object and consume the nested values.
3654        for (int i = 0; i < length; ++i) {
3655          MaterializeAt(frame_index, value_index);
3656        }
3657      } else {
3658        Handle<JSFunction> function =
3659            Handle<JSFunction>::cast(frame->front().GetValue());
3660        arguments = isolate_->factory()->NewArgumentsObject(function, length);
3661        Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
3662        DCHECK_EQ(array->length(), length);
3663        arguments->set_elements(*array);
3664        for (int i = 0; i < length; ++i) {
3665          Handle<Object> value = MaterializeAt(frame_index, value_index);
3666          array->set(i, *value);
3667        }
3668      }
3669      slot->value_ = arguments;
3670      return arguments;
3671    }
3672    case TranslatedValue::kCapturedObject: {
3673      int length = slot->GetChildrenCount();
3674
3675      // The map must be a tagged object.
3676      CHECK(frame->values_[*value_index].kind() == TranslatedValue::kTagged);
3677
3678      Handle<Object> result;
3679      if (slot->value_.ToHandle(&result)) {
3680        // This has been previously materialized, return the previous value.
3681        // We still need to skip all the nested objects.
3682        for (int i = 0; i < length; i++) {
3683          MaterializeAt(frame_index, value_index);
3684        }
3685
3686        return result;
3687      }
3688
3689      Handle<Object> map_object = MaterializeAt(frame_index, value_index);
3690      Handle<Map> map =
3691          Map::GeneralizeAllFieldRepresentations(Handle<Map>::cast(map_object));
3692      switch (map->instance_type()) {
3693        case MUTABLE_HEAP_NUMBER_TYPE:
3694        case HEAP_NUMBER_TYPE: {
3695          // Reuse the HeapNumber value directly as it is already properly
3696          // tagged and skip materializing the HeapNumber explicitly.
3697          Handle<Object> object = MaterializeAt(frame_index, value_index);
3698          slot->value_ = object;
3699          // On 32-bit architectures, there is an extra slot there because
3700          // the escape analysis calculates the number of slots as
3701          // object-size/pointer-size. To account for this, we read out
3702          // any extra slots.
3703          for (int i = 0; i < length - 2; i++) {
3704            MaterializeAt(frame_index, value_index);
3705          }
3706          return object;
3707        }
3708        case JS_OBJECT_TYPE:
3709        case JS_ERROR_TYPE:
3710        case JS_ARGUMENTS_TYPE: {
3711          Handle<JSObject> object =
3712              isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED);
3713          slot->value_ = object;
3714          Handle<Object> properties = MaterializeAt(frame_index, value_index);
3715          Handle<Object> elements = MaterializeAt(frame_index, value_index);
3716          object->set_properties(FixedArray::cast(*properties));
3717          object->set_elements(FixedArrayBase::cast(*elements));
3718          for (int i = 0; i < length - 3; ++i) {
3719            Handle<Object> value = MaterializeAt(frame_index, value_index);
3720            FieldIndex index = FieldIndex::ForPropertyIndex(object->map(), i);
3721            object->FastPropertyAtPut(index, *value);
3722          }
3723          return object;
3724        }
3725        case JS_ARRAY_TYPE: {
3726          Handle<JSArray> object = Handle<JSArray>::cast(
3727              isolate_->factory()->NewJSObjectFromMap(map, NOT_TENURED));
3728          slot->value_ = object;
3729          Handle<Object> properties = MaterializeAt(frame_index, value_index);
3730          Handle<Object> elements = MaterializeAt(frame_index, value_index);
3731          Handle<Object> length = MaterializeAt(frame_index, value_index);
3732          object->set_properties(FixedArray::cast(*properties));
3733          object->set_elements(FixedArrayBase::cast(*elements));
3734          object->set_length(*length);
3735          return object;
3736        }
3737        case JS_FUNCTION_TYPE: {
3738          Handle<SharedFunctionInfo> temporary_shared =
3739              isolate_->factory()->NewSharedFunctionInfo(
3740                  isolate_->factory()->empty_string(), MaybeHandle<Code>(),
3741                  false);
3742          Handle<JSFunction> object =
3743              isolate_->factory()->NewFunctionFromSharedFunctionInfo(
3744                  map, temporary_shared, isolate_->factory()->undefined_value(),
3745                  NOT_TENURED);
3746          slot->value_ = object;
3747          Handle<Object> properties = MaterializeAt(frame_index, value_index);
3748          Handle<Object> elements = MaterializeAt(frame_index, value_index);
3749          Handle<Object> prototype = MaterializeAt(frame_index, value_index);
3750          Handle<Object> shared = MaterializeAt(frame_index, value_index);
3751          Handle<Object> context = MaterializeAt(frame_index, value_index);
3752          Handle<Object> literals = MaterializeAt(frame_index, value_index);
3753          Handle<Object> entry = MaterializeAt(frame_index, value_index);
3754          Handle<Object> next_link = MaterializeAt(frame_index, value_index);
3755          object->ReplaceCode(*isolate_->builtins()->CompileLazy());
3756          object->set_map(*map);
3757          object->set_properties(FixedArray::cast(*properties));
3758          object->set_elements(FixedArrayBase::cast(*elements));
3759          object->set_prototype_or_initial_map(*prototype);
3760          object->set_shared(SharedFunctionInfo::cast(*shared));
3761          object->set_context(Context::cast(*context));
3762          object->set_literals(LiteralsArray::cast(*literals));
3763          CHECK(entry->IsNumber());  // Entry to compile lazy stub.
3764          CHECK(next_link->IsUndefined(isolate_));
3765          return object;
3766        }
3767        case CONS_STRING_TYPE: {
3768          Handle<ConsString> object = Handle<ConsString>::cast(
3769              isolate_->factory()
3770                  ->NewConsString(isolate_->factory()->undefined_string(),
3771                                  isolate_->factory()->undefined_string())
3772                  .ToHandleChecked());
3773          slot->value_ = object;
3774          Handle<Object> hash = MaterializeAt(frame_index, value_index);
3775          Handle<Object> length = MaterializeAt(frame_index, value_index);
3776          Handle<Object> first = MaterializeAt(frame_index, value_index);
3777          Handle<Object> second = MaterializeAt(frame_index, value_index);
3778          object->set_map(*map);
3779          object->set_length(Smi::cast(*length)->value());
3780          object->set_first(String::cast(*first));
3781          object->set_second(String::cast(*second));
3782          CHECK(hash->IsNumber());  // The {Name::kEmptyHashField} value.
3783          return object;
3784        }
3785        case CONTEXT_EXTENSION_TYPE: {
3786          Handle<ContextExtension> object =
3787              isolate_->factory()->NewContextExtension(
3788                  isolate_->factory()->NewScopeInfo(1),
3789                  isolate_->factory()->undefined_value());
3790          slot->value_ = object;
3791          Handle<Object> scope_info = MaterializeAt(frame_index, value_index);
3792          Handle<Object> extension = MaterializeAt(frame_index, value_index);
3793          object->set_scope_info(ScopeInfo::cast(*scope_info));
3794          object->set_extension(*extension);
3795          return object;
3796        }
3797        case FIXED_ARRAY_TYPE: {
3798          Handle<Object> lengthObject = MaterializeAt(frame_index, value_index);
3799          int32_t length = 0;
3800          CHECK(lengthObject->ToInt32(&length));
3801          Handle<FixedArray> object =
3802              isolate_->factory()->NewFixedArray(length);
3803          // We need to set the map, because the fixed array we are
3804          // materializing could be a context or an arguments object,
3805          // in which case we must retain that information.
3806          object->set_map(*map);
3807          slot->value_ = object;
3808          for (int i = 0; i < length; ++i) {
3809            Handle<Object> value = MaterializeAt(frame_index, value_index);
3810            object->set(i, *value);
3811          }
3812          return object;
3813        }
3814        case FIXED_DOUBLE_ARRAY_TYPE: {
3815          DCHECK_EQ(*map, isolate_->heap()->fixed_double_array_map());
3816          Handle<Object> lengthObject = MaterializeAt(frame_index, value_index);
3817          int32_t length = 0;
3818          CHECK(lengthObject->ToInt32(&length));
3819          Handle<FixedArrayBase> object =
3820              isolate_->factory()->NewFixedDoubleArray(length);
3821          slot->value_ = object;
3822          if (length > 0) {
3823            Handle<FixedDoubleArray> double_array =
3824                Handle<FixedDoubleArray>::cast(object);
3825            for (int i = 0; i < length; ++i) {
3826              Handle<Object> value = MaterializeAt(frame_index, value_index);
3827              CHECK(value->IsNumber());
3828              double_array->set(i, value->Number());
3829            }
3830          }
3831          return object;
3832        }
3833        default:
3834          PrintF(stderr, "[couldn't handle instance type %d]\n",
3835                 map->instance_type());
3836          FATAL("unreachable");
3837          return Handle<Object>::null();
3838      }
3839      UNREACHABLE();
3840      break;
3841    }
3842
3843    case TranslatedValue::kDuplicatedObject: {
3844      int object_index = slot->object_index();
3845      TranslatedState::ObjectPosition pos = object_positions_[object_index];
3846
      // Make sure the duplicate is referring to a previous object.
3848      CHECK(pos.frame_index_ < frame_index ||
3849            (pos.frame_index_ == frame_index &&
3850             pos.value_index_ < *value_index - 1));
3851
3852      Handle<Object> object =
3853          frames_[pos.frame_index_].values_[pos.value_index_].GetValue();
3854
3855      // The object should have a (non-sentinel) value.
3856      CHECK(!object.is_null() &&
3857            !object.is_identical_to(isolate_->factory()->arguments_marker()));
3858
3859      slot->value_ = object;
3860      return object;
3861    }
3862
3863    case TranslatedValue::kInvalid:
3864      UNREACHABLE();
3865      break;
3866  }
3867
3868  FATAL("We should never get here - unexpected deopt slot kind.");
3869  return Handle<Object>::null();
3870}
3871
3872
3873Handle<Object> TranslatedState::MaterializeObjectAt(int object_index) {
3874  TranslatedState::ObjectPosition pos = object_positions_[object_index];
3875  return MaterializeAt(pos.frame_index_, &(pos.value_index_));
3876}
3877
3878
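// Tries to produce the adapted arguments object for the frame at
// {frame_index}: for the outermost frame the arguments are taken from the
// physical caller frame on the stack, otherwise they are rebuilt from the
// arguments adaptor frame recorded in the translation.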
3879bool TranslatedState::GetAdaptedArguments(Handle<JSObject>* result,
3880                                          int frame_index) {
3881  if (frame_index == 0) {
3882    // Top level frame -> we need to go to the parent frame on the stack.
3883    if (!has_adapted_arguments_) return false;
3884
    // This is the top-level frame, so we need to go to the stack to get
    // this function's arguments. (Note that this relies on not inlining
    // recursive functions!)
3888    Handle<JSFunction> function =
3889        Handle<JSFunction>::cast(frames_[frame_index].front().GetValue());
3890    *result = Accessors::FunctionGetArguments(function);
3891    return true;
3892  } else {
3893    TranslatedFrame* previous_frame = &(frames_[frame_index]);
3894    if (previous_frame->kind() != TranslatedFrame::kArgumentsAdaptor) {
3895      return false;
3896    }
3897    // We get the adapted arguments from the parent translation.
3898    int length = previous_frame->height();
3899    Handle<JSFunction> function =
3900        Handle<JSFunction>::cast(previous_frame->front().GetValue());
3901    Handle<JSObject> arguments =
3902        isolate_->factory()->NewArgumentsObject(function, length);
3903    Handle<FixedArray> array = isolate_->factory()->NewFixedArray(length);
3904    arguments->set_elements(*array);
3905    TranslatedFrame::iterator arg_iterator = previous_frame->begin();
3906    arg_iterator++;  // Skip function.
3907    for (int i = 0; i < length; ++i) {
3908      Handle<Object> value = arg_iterator->GetValue();
3909      array->set(i, *value);
3910      arg_iterator++;
3911    }
3912    CHECK(arg_iterator == previous_frame->end());
3913    *result = arguments;
3914    return true;
3915  }
3916}
3917
3918
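// Returns the translated frame that holds the arguments of the
// {jsframe_index}-th JavaScript frame: the preceding arguments adaptor frame
// if present, otherwise the JS frame itself (in which case the argument count
// is the formal parameter count plus the receiver).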
3919TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex(
3920    int jsframe_index, int* args_count) {
3921  for (size_t i = 0; i < frames_.size(); i++) {
3922    if (frames_[i].kind() == TranslatedFrame::kFunction ||
3923        frames_[i].kind() == TranslatedFrame::kInterpretedFunction) {
3924      if (jsframe_index > 0) {
3925        jsframe_index--;
3926      } else {
        // We have the JS function frame; now check if it has an arguments
        // adaptor frame in front of it.
3928        if (i > 0 &&
3929            frames_[i - 1].kind() == TranslatedFrame::kArgumentsAdaptor) {
3930          *args_count = frames_[i - 1].height();
3931          return &(frames_[i - 1]);
3932        }
3933        *args_count =
3934            frames_[i].shared_info()->internal_formal_parameter_count() + 1;
3935        return &(frames_[i]);
3936      }
3937    }
3938  }
3939  return nullptr;
3940}
3941
3942
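// Stores the objects materialized so far in the materialized object store
// (keyed by the frame pointer) and, if a new store entry with real values was
// created, deoptimizes the function so that the frame starts using the
// materialized objects.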
3943void TranslatedState::StoreMaterializedValuesAndDeopt() {
3944  MaterializedObjectStore* materialized_store =
3945      isolate_->materialized_object_store();
3946  Handle<FixedArray> previously_materialized_objects =
3947      materialized_store->Get(stack_frame_pointer_);
3948
3949  Handle<Object> marker = isolate_->factory()->arguments_marker();
3950
3951  int length = static_cast<int>(object_positions_.size());
3952  bool new_store = false;
3953  if (previously_materialized_objects.is_null()) {
3954    previously_materialized_objects =
3955        isolate_->factory()->NewFixedArray(length);
3956    for (int i = 0; i < length; i++) {
3957      previously_materialized_objects->set(i, *marker);
3958    }
3959    new_store = true;
3960  }
3961
3962  CHECK_EQ(length, previously_materialized_objects->length());
3963
3964  bool value_changed = false;
3965  for (int i = 0; i < length; i++) {
3966    TranslatedState::ObjectPosition pos = object_positions_[i];
3967    TranslatedValue* value_info =
3968        &(frames_[pos.frame_index_].values_[pos.value_index_]);
3969
3970    CHECK(value_info->IsMaterializedObject());
3971
3972    Handle<Object> value(value_info->GetRawValue(), isolate_);
3973
3974    if (!value.is_identical_to(marker)) {
3975      if (previously_materialized_objects->get(i) == *marker) {
3976        previously_materialized_objects->set(i, *value);
3977        value_changed = true;
3978      } else {
3979        CHECK(previously_materialized_objects->get(i) == *value);
3980      }
3981    }
3982  }
3983  if (new_store && value_changed) {
3984    materialized_store->Set(stack_frame_pointer_,
3985                            previously_materialized_objects);
3986    CHECK(frames_[0].kind() == TranslatedFrame::kFunction ||
3987          frames_[0].kind() == TranslatedFrame::kInterpretedFunction ||
3988          frames_[0].kind() == TranslatedFrame::kTailCallerFunction);
3989    Object* const function = frames_[0].front().GetRawValue();
3990    Deoptimizer::DeoptimizeFunction(JSFunction::cast(function));
3991  }
3992}
3993
3994
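// Injects objects that were materialized for this stack frame on a previous
// occasion back into the translated values, so that repeated inspections see
// the same object identities.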
3995void TranslatedState::UpdateFromPreviouslyMaterializedObjects() {
3996  MaterializedObjectStore* materialized_store =
3997      isolate_->materialized_object_store();
3998  Handle<FixedArray> previously_materialized_objects =
3999      materialized_store->Get(stack_frame_pointer_);
4000
4001  // If we have no previously materialized objects, there is nothing to do.
4002  if (previously_materialized_objects.is_null()) return;
4003
4004  Handle<Object> marker = isolate_->factory()->arguments_marker();
4005
4006  int length = static_cast<int>(object_positions_.size());
4007  CHECK_EQ(length, previously_materialized_objects->length());
4008
4009  for (int i = 0; i < length; i++) {
    // For previously materialized objects, inject their values into the
    // translated values.
4012    if (previously_materialized_objects->get(i) != *marker) {
4013      TranslatedState::ObjectPosition pos = object_positions_[i];
4014      TranslatedValue* value_info =
4015          &(frames_[pos.frame_index_].values_[pos.value_index_]);
4016      CHECK(value_info->IsMaterializedObject());
4017
4018      value_info->value_ =
4019          Handle<Object>(previously_materialized_objects->get(i), isolate_);
4020    }
4021  }
4022}
4023
4024}  // namespace internal
4025}  // namespace v8
4026