1// Copyright 2012 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29#include "lithium.h"
30#include "scopes.h"
31
32#if V8_TARGET_ARCH_IA32
33#include "ia32/lithium-ia32.h"
34#include "ia32/lithium-codegen-ia32.h"
35#elif V8_TARGET_ARCH_X64
36#include "x64/lithium-x64.h"
37#include "x64/lithium-codegen-x64.h"
38#elif V8_TARGET_ARCH_ARM
39#include "arm/lithium-arm.h"
40#include "arm/lithium-codegen-arm.h"
41#elif V8_TARGET_ARCH_MIPS
42#include "mips/lithium-mips.h"
43#include "mips/lithium-codegen-mips.h"
44#else
45#error "Unknown architecture."
46#endif
47
48namespace v8 {
49namespace internal {
50
51
52void LOperand::PrintTo(StringStream* stream) {
53  LUnallocated* unalloc = NULL;
54  switch (kind()) {
55    case INVALID:
56      stream->Add("(0)");
57      break;
58    case UNALLOCATED:
59      unalloc = LUnallocated::cast(this);
60      stream->Add("v%d", unalloc->virtual_register());
61      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
62        stream->Add("(=%dS)", unalloc->fixed_slot_index());
63        break;
64      }
65      switch (unalloc->extended_policy()) {
66        case LUnallocated::NONE:
67          break;
68        case LUnallocated::FIXED_REGISTER: {
69          int reg_index = unalloc->fixed_register_index();
70          const char* register_name =
71              Register::AllocationIndexToString(reg_index);
72          stream->Add("(=%s)", register_name);
73          break;
74        }
75        case LUnallocated::FIXED_DOUBLE_REGISTER: {
76          int reg_index = unalloc->fixed_register_index();
77          const char* double_register_name =
78              DoubleRegister::AllocationIndexToString(reg_index);
79          stream->Add("(=%s)", double_register_name);
80          break;
81        }
82        case LUnallocated::MUST_HAVE_REGISTER:
83          stream->Add("(R)");
84          break;
85        case LUnallocated::WRITABLE_REGISTER:
86          stream->Add("(WR)");
87          break;
88        case LUnallocated::SAME_AS_FIRST_INPUT:
89          stream->Add("(1)");
90          break;
91        case LUnallocated::ANY:
92          stream->Add("(-)");
93          break;
94      }
95      break;
96    case CONSTANT_OPERAND:
97      stream->Add("[constant:%d]", index());
98      break;
99    case STACK_SLOT:
100      stream->Add("[stack:%d]", index());
101      break;
102    case DOUBLE_STACK_SLOT:
103      stream->Add("[double_stack:%d]", index());
104      break;
105    case REGISTER:
106      stream->Add("[%s|R]", Register::AllocationIndexToString(index()));
107      break;
108    case DOUBLE_REGISTER:
109      stream->Add("[%s|R]", DoubleRegister::AllocationIndexToString(index()));
110      break;
111    case ARGUMENT:
112      stream->Add("[arg:%d]", index());
113      break;
114  }
115}
116
// For each cached lithium operand class (from LITHIUM_OPERAND_LIST), define
// its static cache array plus the SetUpCache/TearDownCache pair.  SetUpCache
// pre-allocates operands for the first kNumCachedOperands indices so that
// frequently used operands can be shared instead of allocated per use;
// SetUpCache is idempotent (the early return keeps an existing cache).
// Comments must stay outside the macro body: a // comment on a continued
// line would swallow the trailing backslash.
#define DEFINE_OPERAND_CACHE(name, type)                      \
  L##name* L##name::cache = NULL;                             \
                                                              \
  void L##name::SetUpCache() {                                \
    if (cache) return;                                        \
    cache = new L##name[kNumCachedOperands];                  \
    for (int i = 0; i < kNumCachedOperands; i++) {            \
      cache[i].ConvertTo(type, i);                            \
    }                                                         \
  }                                                           \
                                                              \
  void L##name::TearDownCache() {                             \
    delete[] cache;                                           \
  }

// Instantiate the cache definitions for every operand kind in the list.
LITHIUM_OPERAND_LIST(DEFINE_OPERAND_CACHE)
#undef DEFINE_OPERAND_CACHE
134
135void LOperand::SetUpCaches() {
136#define LITHIUM_OPERAND_SETUP(name, type) L##name::SetUpCache();
137  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
138#undef LITHIUM_OPERAND_SETUP
139}
140
141
142void LOperand::TearDownCaches() {
143#define LITHIUM_OPERAND_TEARDOWN(name, type) L##name::TearDownCache();
144  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
145#undef LITHIUM_OPERAND_TEARDOWN
146}
147
148
149bool LParallelMove::IsRedundant() const {
150  for (int i = 0; i < move_operands_.length(); ++i) {
151    if (!move_operands_[i].IsRedundant()) return false;
152  }
153  return true;
154}
155
156
157void LParallelMove::PrintDataTo(StringStream* stream) const {
158  bool first = true;
159  for (int i = 0; i < move_operands_.length(); ++i) {
160    if (!move_operands_[i].IsEliminated()) {
161      LOperand* source = move_operands_[i].source();
162      LOperand* destination = move_operands_[i].destination();
163      if (!first) stream->Add(" ");
164      first = false;
165      if (source->Equals(destination)) {
166        destination->PrintTo(stream);
167      } else {
168        destination->PrintTo(stream);
169        stream->Add(" = ");
170        source->PrintTo(stream);
171      }
172      stream->Add(";");
173    }
174  }
175}
176
177
178void LEnvironment::PrintTo(StringStream* stream) {
179  stream->Add("[id=%d|", ast_id().ToInt());
180  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
181    stream->Add("deopt_id=%d|", deoptimization_index());
182  }
183  stream->Add("parameters=%d|", parameter_count());
184  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
185  for (int i = 0; i < values_.length(); ++i) {
186    if (i != 0) stream->Add(";");
187    if (values_[i] == NULL) {
188      stream->Add("[hole]");
189    } else {
190      values_[i]->PrintTo(stream);
191    }
192  }
193  stream->Add("]");
194}
195
196
197void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
198  // Do not record arguments as pointers.
199  if (op->IsStackSlot() && op->index() < 0) return;
200  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
201  pointer_operands_.Add(op, zone);
202}
203
204
205void LPointerMap::RemovePointer(LOperand* op) {
206  // Do not record arguments as pointers.
207  if (op->IsStackSlot() && op->index() < 0) return;
208  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
209  for (int i = 0; i < pointer_operands_.length(); ++i) {
210    if (pointer_operands_[i]->Equals(op)) {
211      pointer_operands_.Remove(i);
212      --i;
213    }
214  }
215}
216
217
218void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
219  // Do not record arguments as pointers.
220  if (op->IsStackSlot() && op->index() < 0) return;
221  ASSERT(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
222  untagged_operands_.Add(op, zone);
223}
224
225
226void LPointerMap::PrintTo(StringStream* stream) {
227  stream->Add("{");
228  for (int i = 0; i < pointer_operands_.length(); ++i) {
229    if (i != 0) stream->Add(";");
230    pointer_operands_[i]->PrintTo(stream);
231  }
232  stream->Add("} @%d", position());
233}
234
235
236int ElementsKindToShiftSize(ElementsKind elements_kind) {
237  switch (elements_kind) {
238    case EXTERNAL_BYTE_ELEMENTS:
239    case EXTERNAL_PIXEL_ELEMENTS:
240    case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
241      return 0;
242    case EXTERNAL_SHORT_ELEMENTS:
243    case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
244      return 1;
245    case EXTERNAL_INT_ELEMENTS:
246    case EXTERNAL_UNSIGNED_INT_ELEMENTS:
247    case EXTERNAL_FLOAT_ELEMENTS:
248      return 2;
249    case EXTERNAL_DOUBLE_ELEMENTS:
250    case FAST_DOUBLE_ELEMENTS:
251    case FAST_HOLEY_DOUBLE_ELEMENTS:
252      return 3;
253    case FAST_SMI_ELEMENTS:
254    case FAST_ELEMENTS:
255    case FAST_HOLEY_SMI_ELEMENTS:
256    case FAST_HOLEY_ELEMENTS:
257    case DICTIONARY_ELEMENTS:
258    case NON_STRICT_ARGUMENTS_ELEMENTS:
259      return kPointerSizeLog2;
260  }
261  UNREACHABLE();
262  return 0;
263}
264
265
266int StackSlotOffset(int index) {
267  if (index >= 0) {
268    // Local or spill slot. Skip the frame pointer, function, and
269    // context in the fixed part of the frame.
270    return -(index + 3) * kPointerSize;
271  } else {
272    // Incoming parameter. Skip the return address.
273    return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
274  }
275}
276
277
// Creates an empty chunk for |graph|.  The instruction, pointer-map, and
// inlined-closure lists are zone-allocated in the graph's zone with small
// initial capacities; no spill slots are reserved yet.
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, graph->zone()),
      pointer_maps_(8, graph->zone()),
      inlined_closures_(1, graph->zone()) {
}
286
287
288LLabel* LChunk::GetLabel(int block_id) const {
289  HBasicBlock* block = graph_->blocks()->at(block_id);
290  int first_instruction = block->first_instruction_index();
291  return LLabel::cast(instructions_[first_instruction]);
292}
293
294
295int LChunk::LookupDestination(int block_id) const {
296  LLabel* cur = GetLabel(block_id);
297  while (cur->replacement() != NULL) {
298    cur = cur->replacement();
299  }
300  return cur->block_id();
301}
302
303Label* LChunk::GetAssemblyLabel(int block_id) const {
304  LLabel* label = GetLabel(block_id);
305  ASSERT(!label->HasReplacement());
306  return label->label();
307}
308
309
310void LChunk::MarkEmptyBlocks() {
311  LPhase phase("L_Mark empty blocks", this);
312  for (int i = 0; i < graph()->blocks()->length(); ++i) {
313    HBasicBlock* block = graph()->blocks()->at(i);
314    int first = block->first_instruction_index();
315    int last = block->last_instruction_index();
316    LInstruction* first_instr = instructions()->at(first);
317    LInstruction* last_instr = instructions()->at(last);
318
319    LLabel* label = LLabel::cast(first_instr);
320    if (last_instr->IsGoto()) {
321      LGoto* goto_instr = LGoto::cast(last_instr);
322      if (label->IsRedundant() &&
323          !label->is_loop_header()) {
324        bool can_eliminate = true;
325        for (int i = first + 1; i < last && can_eliminate; ++i) {
326          LInstruction* cur = instructions()->at(i);
327          if (cur->IsGap()) {
328            LGap* gap = LGap::cast(cur);
329            if (!gap->IsRedundant()) {
330              can_eliminate = false;
331            }
332          } else {
333            can_eliminate = false;
334          }
335        }
336        if (can_eliminate) {
337          label->set_replacement(GetLabel(goto_instr->block_id()));
338        }
339      }
340    }
341  }
342}
343
344
345void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
346  LInstructionGap* gap = new(graph_->zone()) LInstructionGap(block);
347  gap->set_hydrogen_value(instr->hydrogen_value());
348  int index = -1;
349  if (instr->IsControl()) {
350    instructions_.Add(gap, zone());
351    index = instructions_.length();
352    instructions_.Add(instr, zone());
353  } else {
354    index = instructions_.length();
355    instructions_.Add(instr, zone());
356    instructions_.Add(gap, zone());
357  }
358  if (instr->HasPointerMap()) {
359    pointer_maps_.Add(instr->pointer_map(), zone());
360    instr->pointer_map()->set_lithium_position(index);
361  }
362}
363
364
365LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
366  return LConstantOperand::Create(constant->id(), zone());
367}
368
369
370int LChunk::GetParameterStackSlot(int index) const {
371  // The receiver is at index 0, the first parameter at index 1, so we
372  // shift all parameter indexes down by the number of parameters, and
373  // make sure they end up negative so they are distinguishable from
374  // spill slots.
375  int result = index - info()->scope()->num_parameters() - 1;
376  ASSERT(result < 0);
377  return result;
378}
379
380
381// A parameter relative to ebp in the arguments stub.
382int LChunk::ParameterAt(int index) {
383  ASSERT(-1 <= index);  // -1 is the receiver.
384  return (1 + info()->scope()->num_parameters() - index) *
385      kPointerSize;
386}
387
388
389LGap* LChunk::GetGapAt(int index) const {
390  return LGap::cast(instructions_[index]);
391}
392
393
394bool LChunk::IsGapAt(int index) const {
395  return instructions_[index]->IsGap();
396}
397
398
399int LChunk::NearestGapPos(int index) const {
400  while (!IsGapAt(index)) index--;
401  return index;
402}
403
404
405void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
406  GetGapAt(index)->GetOrCreateParallelMove(
407      LGap::START, zone())->AddMove(from, to, zone());
408}
409
410
411HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
412  return HConstant::cast(graph_->LookupValue(operand->index()));
413}
414
415
416Representation LChunk::LookupLiteralRepresentation(
417    LConstantOperand* operand) const {
418  return graph_->LookupValue(operand->index())->representation();
419}
420
421
422LChunk* LChunk::NewChunk(HGraph* graph) {
423  DisallowHandleAllocation no_handles;
424  DisallowHeapAllocation no_gc;
425  int values = graph->GetMaximumValueID();
426  CompilationInfo* info = graph->info();
427  if (values > LUnallocated::kMaxVirtualRegisters) {
428    info->set_bailout_reason(kNotEnoughVirtualRegistersForValues);
429    return NULL;
430  }
431  LAllocator allocator(values, graph);
432  LChunkBuilder builder(info, graph, &allocator);
433  LChunk* chunk = builder.Build();
434  if (chunk == NULL) return NULL;
435
436  if (!allocator.Allocate(chunk)) {
437    info->set_bailout_reason(kNotEnoughVirtualRegistersRegalloc);
438    return NULL;
439  }
440
441  chunk->set_allocated_double_registers(
442      allocator.assigned_double_registers());
443
444  return chunk;
445}
446
447
// Runs the lithium code generator over this chunk and packages the result
// as an optimized Code object.  Returns a null handle when code generation
// does not complete.
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  LCodeGen generator(this, &assembler, info());

  // Collapse empty blocks before emitting code so jumps go straight to
  // their final targets.
  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Code::Flags flags = info()->flags();
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
    generator.FinishCode(code);
    // NOTE(review): |code| is dereferenced here before the is_null() check
    // below, so the check only guards the JIT line-info event; presumably
    // MakeCodeEpilogue never returns a null handle — confirm.
    code->set_is_crankshafted(true);
    if (!code.is_null()) {
      void* jit_handler_data =
          assembler.positions_recorder()->DetachJITHandlerData();
      LOG_CODE_EVENT(info()->isolate(),
                     CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));
    }

    CodeGenerator::PrintCode(code, info());
    return code;
  }
  return Handle<Code>::null();
}
476
477
478void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
479  allocated_double_registers_ = allocated_registers;
480  BitVector* doubles = allocated_double_registers();
481  BitVector::Iterator iterator(doubles);
482  while (!iterator.Done()) {
483    if (info()->saves_caller_doubles()) {
484      if (kDoubleSize == kPointerSize * 2) {
485        spill_slot_count_ += 2;
486      } else {
487        spill_slot_count_++;
488      }
489    }
490    iterator.Advance();
491  }
492}
493
494
495LPhase::~LPhase() {
496  if (ShouldProduceTraceOutput()) {
497    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
498  }
499}
500
501
502} }  // namespace v8::internal
503