// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_LITHIUM_H_
#define V8_LITHIUM_H_

#include "allocation.h"
#include "hydrogen.h"
#include "safepoint-table.h"

namespace v8 {
namespace internal {

class LOperand: public ZoneObject {
 public:
  enum Kind {
    INVALID,
    UNALLOCATED,
    CONSTANT_OPERAND,
    STACK_SLOT,
    DOUBLE_STACK_SLOT,
    REGISTER,
    DOUBLE_REGISTER,
    ARGUMENT
  };

  LOperand() : value_(KindField::encode(INVALID)) { }

  Kind kind() const { return KindField::decode(value_); }
  int index() const { return static_cast<int>(value_) >> kKindFieldWidth; }
  bool IsConstantOperand() const { return kind() == CONSTANT_OPERAND; }
  bool IsStackSlot() const { return kind() == STACK_SLOT; }
  bool IsDoubleStackSlot() const { return kind() == DOUBLE_STACK_SLOT; }
  bool IsRegister() const { return kind() == REGISTER; }
  bool IsDoubleRegister() const { return kind() == DOUBLE_REGISTER; }
  bool IsArgument() const { return kind() == ARGUMENT; }
  bool IsUnallocated() const { return kind() == UNALLOCATED; }
  bool IsIgnored() const { return kind() == INVALID; }
  bool Equals(LOperand* other) const { return value_ == other->value_; }

  void PrintTo(StringStream* stream);
  void ConvertTo(Kind kind, int index) {
    value_ = KindField::encode(kind);
    value_ |= index << kKindFieldWidth;
    ASSERT(this->index() == index);
  }

  // Calls SetUpCache() for each subclass. Don't forget to update this method
  // if you add a new LOperand subclass.
  static void SetUpCaches();

 protected:
  static const int kKindFieldWidth = 3;
  class KindField : public BitField<Kind, 0, kKindFieldWidth> { };

  LOperand(Kind kind, int index) { ConvertTo(kind, index); }

  unsigned value_;
};

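// Informal note on the encoding above: an LOperand packs its whole state
// into the single unsigned |value_| word.  The low kKindFieldWidth (3) bits
// hold the Kind via KindField, and the remaining high bits hold a signed
// index; index() casts to int before shifting so that negative indices are
// recovered correctly.  For example, ConvertTo(STACK_SLOT, 2) leaves
// value_ == ((2 << kKindFieldWidth) | STACK_SLOT).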

class LUnallocated: public LOperand {
 public:
  enum Policy {
    NONE,
    ANY,
    FIXED_REGISTER,
    FIXED_DOUBLE_REGISTER,
    FIXED_SLOT,
    MUST_HAVE_REGISTER,
    WRITABLE_REGISTER,
    SAME_AS_FIRST_INPUT
  };

  // Lifetime of an operand inside the instruction.
  enum Lifetime {
    // A USED_AT_START operand is guaranteed to be live only at instruction
    // start.  The register allocator is free to assign the same register to
    // some other operand used inside the instruction (i.e. a temporary or
    // an output).
    USED_AT_START,

    // A USED_AT_END operand is treated as live until the end of the
    // instruction.  This means that the register allocator will not reuse
    // its register for any other operand inside the instruction.
    USED_AT_END
  };

  explicit LUnallocated(Policy policy) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, USED_AT_END);
  }

  LUnallocated(Policy policy, int fixed_index) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, fixed_index, USED_AT_END);
  }

  LUnallocated(Policy policy, Lifetime lifetime) : LOperand(UNALLOCATED, 0) {
    Initialize(policy, 0, lifetime);
  }

  // The superclass has a KindField.  Some policies have a signed fixed
  // index in the upper bits.
  static const int kPolicyWidth = 3;
  static const int kLifetimeWidth = 1;
  static const int kVirtualRegisterWidth = 18;

  static const int kPolicyShift = kKindFieldWidth;
  static const int kLifetimeShift = kPolicyShift + kPolicyWidth;
  static const int kVirtualRegisterShift = kLifetimeShift + kLifetimeWidth;
  static const int kFixedIndexShift =
      kVirtualRegisterShift + kVirtualRegisterWidth;

  class PolicyField : public BitField<Policy, kPolicyShift, kPolicyWidth> { };

  class LifetimeField
      : public BitField<Lifetime, kLifetimeShift, kLifetimeWidth> {
  };

  class VirtualRegisterField
      : public BitField<unsigned,
                        kVirtualRegisterShift,
                        kVirtualRegisterWidth> {
  };

  static const int kMaxVirtualRegisters = 1 << kVirtualRegisterWidth;
  static const int kMaxFixedIndex = 63;
  static const int kMinFixedIndex = -64;

  bool HasAnyPolicy() const {
    return policy() == ANY;
  }
  bool HasFixedPolicy() const {
    return policy() == FIXED_REGISTER ||
        policy() == FIXED_DOUBLE_REGISTER ||
        policy() == FIXED_SLOT;
  }
  bool HasRegisterPolicy() const {
    return policy() == WRITABLE_REGISTER || policy() == MUST_HAVE_REGISTER;
  }
  bool HasSameAsInputPolicy() const {
    return policy() == SAME_AS_FIRST_INPUT;
  }
  Policy policy() const { return PolicyField::decode(value_); }
  void set_policy(Policy policy) {
    value_ = PolicyField::update(value_, policy);
  }
  int fixed_index() const {
    return static_cast<int>(value_) >> kFixedIndexShift;
  }

  int virtual_register() const {
    return VirtualRegisterField::decode(value_);
  }

  void set_virtual_register(unsigned id) {
    value_ = VirtualRegisterField::update(value_, id);
  }

  LUnallocated* CopyUnconstrained() {
    LUnallocated* result = new LUnallocated(ANY);
    result->set_virtual_register(virtual_register());
    return result;
  }

  static LUnallocated* cast(LOperand* op) {
    ASSERT(op->IsUnallocated());
    return reinterpret_cast<LUnallocated*>(op);
  }

  bool IsUsedAtStart() {
    return LifetimeField::decode(value_) == USED_AT_START;
  }

 private:
  void Initialize(Policy policy, int fixed_index, Lifetime lifetime) {
    value_ |= PolicyField::encode(policy);
    value_ |= LifetimeField::encode(lifetime);
    value_ |= fixed_index << kFixedIndexShift;
    ASSERT(this->fixed_index() == fixed_index);
  }
};

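// Informal layout sketch of LUnallocated::value_ (assuming a 32-bit word),
// as implied by the shift and width constants above:
//
//   bits  0..2   operand kind (KindField, always UNALLOCATED here)
//   bits  3..5   policy (PolicyField)
//   bit   6      lifetime (LifetimeField)
//   bits  7..24  virtual register (VirtualRegisterField)
//   bits 25..31  signed fixed index, which is where the -64..63 range of
//                kMinFixedIndex/kMaxFixedIndex comes from.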

class LMoveOperands BASE_EMBEDDED {
 public:
  LMoveOperands(LOperand* source, LOperand* destination)
      : source_(source), destination_(destination) {
  }

  LOperand* source() const { return source_; }
  void set_source(LOperand* operand) { source_ = operand; }

  LOperand* destination() const { return destination_; }
  void set_destination(LOperand* operand) { destination_ = operand; }

  // The gap resolver marks moves as "in-progress" by clearing the
  // destination (but not the source).
  bool IsPending() const {
    return destination_ == NULL && source_ != NULL;
  }

  // True if a move into the given destination operand is blocked by this
  // move, i.e. this move has not been eliminated and its source is that
  // operand.
  bool Blocks(LOperand* operand) const {
    return !IsEliminated() && source()->Equals(operand);
  }

  // A move is redundant if it's been eliminated, if its source and
  // destination are the same, or if its destination is unneeded.
  bool IsRedundant() const {
    return IsEliminated() || source_->Equals(destination_) || IsIgnored();
  }

  bool IsIgnored() const {
    return destination_ != NULL && destination_->IsIgnored();
  }

  // We clear both operands to indicate a move that has been eliminated.
  void Eliminate() { source_ = destination_ = NULL; }
  bool IsEliminated() const {
    ASSERT(source_ != NULL || destination_ == NULL);
    return source_ == NULL;
  }

 private:
  LOperand* source_;
  LOperand* destination_;
};

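// Illustrative sketch (not the actual gap resolver) of how the flags above
// fit together when a parallel move is serialized; |source| and
// |destination| stand in for arbitrary operands:
//
//   LMoveOperands move(source, destination);
//   if (move.IsRedundant()) { /* nothing to emit */ }
//   move.set_destination(NULL);   // mark in-progress; IsPending() is true
//   // move.Blocks(op) now reports whether writing |op| would clobber the
//   // source of this move.
//   move.Eliminate();             // done; IsEliminated() is true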

class LConstantOperand: public LOperand {
 public:
  static LConstantOperand* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LConstantOperand(index);
  }

  static LConstantOperand* cast(LOperand* op) {
    ASSERT(op->IsConstantOperand());
    return reinterpret_cast<LConstantOperand*>(op);
  }

  static void SetUpCache();

 private:
  static const int kNumCachedOperands = 128;
  static LConstantOperand* cache;

  LConstantOperand() : LOperand() { }
  explicit LConstantOperand(int index) : LOperand(CONSTANT_OPERAND, index) { }
};

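// Note on the caching pattern shared by LConstantOperand and the operand
// classes below: Create(index) hands out a pointer into the static |cache|
// array for indices below kNumCachedOperands and only heap-allocates for
// larger ones, so commonly used small indices do not allocate a new object
// each time.  SetUpCache() (invoked via LOperand::SetUpCaches()) is expected
// to populate that array in the corresponding .cc file.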

class LArgument: public LOperand {
 public:
  explicit LArgument(int index) : LOperand(ARGUMENT, index) { }

  static LArgument* cast(LOperand* op) {
    ASSERT(op->IsArgument());
    return reinterpret_cast<LArgument*>(op);
  }
};


class LStackSlot: public LOperand {
 public:
  static LStackSlot* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LStackSlot(index);
  }

  static LStackSlot* cast(LOperand* op) {
    ASSERT(op->IsStackSlot());
    return reinterpret_cast<LStackSlot*>(op);
  }

  static void SetUpCache();

 private:
  static const int kNumCachedOperands = 128;
  static LStackSlot* cache;

  LStackSlot() : LOperand() { }
  explicit LStackSlot(int index) : LOperand(STACK_SLOT, index) { }
};


class LDoubleStackSlot: public LOperand {
 public:
  static LDoubleStackSlot* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LDoubleStackSlot(index);
  }

  static LDoubleStackSlot* cast(LOperand* op) {
    ASSERT(op->IsDoubleStackSlot());
    return reinterpret_cast<LDoubleStackSlot*>(op);
  }

  static void SetUpCache();

 private:
  static const int kNumCachedOperands = 128;
  static LDoubleStackSlot* cache;

  LDoubleStackSlot() : LOperand() { }
  explicit LDoubleStackSlot(int index) : LOperand(DOUBLE_STACK_SLOT, index) { }
};


class LRegister: public LOperand {
 public:
  static LRegister* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LRegister(index);
  }

  static LRegister* cast(LOperand* op) {
    ASSERT(op->IsRegister());
    return reinterpret_cast<LRegister*>(op);
  }

  static void SetUpCache();

 private:
  static const int kNumCachedOperands = 16;
  static LRegister* cache;

  LRegister() : LOperand() { }
  explicit LRegister(int index) : LOperand(REGISTER, index) { }
};


class LDoubleRegister: public LOperand {
 public:
  static LDoubleRegister* Create(int index) {
    ASSERT(index >= 0);
    if (index < kNumCachedOperands) return &cache[index];
    return new LDoubleRegister(index);
  }

  static LDoubleRegister* cast(LOperand* op) {
    ASSERT(op->IsDoubleRegister());
    return reinterpret_cast<LDoubleRegister*>(op);
  }

  static void SetUpCache();

 private:
  static const int kNumCachedOperands = 16;
  static LDoubleRegister* cache;

  LDoubleRegister() : LOperand() { }
  explicit LDoubleRegister(int index) : LOperand(DOUBLE_REGISTER, index) { }
};


class LParallelMove : public ZoneObject {
 public:
  LParallelMove() : move_operands_(4) { }

  void AddMove(LOperand* from, LOperand* to) {
    move_operands_.Add(LMoveOperands(from, to));
  }

  bool IsRedundant() const;

  const ZoneList<LMoveOperands>* move_operands() const {
    return &move_operands_;
  }

  void PrintDataTo(StringStream* stream) const;

 private:
  ZoneList<LMoveOperands> move_operands_;
};

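// Illustrative use of LParallelMove (a sketch with made-up indices):
//
//   LParallelMove* gap = new LParallelMove();
//   gap->AddMove(LRegister::Create(0), LStackSlot::Create(3));
//   gap->AddMove(LConstantOperand::Create(1), LRegister::Create(2));
//
// The recorded moves are semantically parallel (all sources are read before
// any destination is written); turning them into an ordered sequence of
// machine moves is the gap resolver's job.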

class LPointerMap: public ZoneObject {
 public:
  explicit LPointerMap(int position)
      : pointer_operands_(8),
        untagged_operands_(0),
        position_(position),
        lithium_position_(-1) { }

  const ZoneList<LOperand*>* GetNormalizedOperands() {
    for (int i = 0; i < untagged_operands_.length(); ++i) {
      RemovePointer(untagged_operands_[i]);
    }
    untagged_operands_.Clear();
    return &pointer_operands_;
  }
  int position() const { return position_; }
  int lithium_position() const { return lithium_position_; }

  void set_lithium_position(int pos) {
    ASSERT(lithium_position_ == -1);
    lithium_position_ = pos;
  }

  void RecordPointer(LOperand* op);
  void RemovePointer(LOperand* op);
  void RecordUntagged(LOperand* op);
  void PrintTo(StringStream* stream);

 private:
  ZoneList<LOperand*> pointer_operands_;
  ZoneList<LOperand*> untagged_operands_;
  int position_;
  int lithium_position_;
};

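// Note: GetNormalizedOperands() above strips every operand recorded via
// RecordUntagged() out of the pointer list before returning it, so callers
// see only the operands still recorded as holding tagged pointers.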

class LEnvironment: public ZoneObject {
 public:
  LEnvironment(Handle<JSFunction> closure,
               FrameType frame_type,
               int ast_id,
               int parameter_count,
               int argument_count,
               int value_count,
               LEnvironment* outer)
      : closure_(closure),
        frame_type_(frame_type),
        arguments_stack_height_(argument_count),
        deoptimization_index_(Safepoint::kNoDeoptimizationIndex),
        translation_index_(-1),
        ast_id_(ast_id),
        parameter_count_(parameter_count),
        pc_offset_(-1),
        values_(value_count),
        is_tagged_(value_count, closure->GetHeap()->isolate()->zone()),
        spilled_registers_(NULL),
        spilled_double_registers_(NULL),
        outer_(outer) { }

  Handle<JSFunction> closure() const { return closure_; }
  FrameType frame_type() const { return frame_type_; }
  int arguments_stack_height() const { return arguments_stack_height_; }
  int deoptimization_index() const { return deoptimization_index_; }
  int translation_index() const { return translation_index_; }
  int ast_id() const { return ast_id_; }
  int parameter_count() const { return parameter_count_; }
  int pc_offset() const { return pc_offset_; }
  LOperand** spilled_registers() const { return spilled_registers_; }
  LOperand** spilled_double_registers() const {
    return spilled_double_registers_;
  }
  const ZoneList<LOperand*>* values() const { return &values_; }
  LEnvironment* outer() const { return outer_; }

  void AddValue(LOperand* operand, Representation representation) {
    values_.Add(operand);
    if (representation.IsTagged()) {
      is_tagged_.Add(values_.length() - 1);
    }
  }

  bool HasTaggedValueAt(int index) const {
    return is_tagged_.Contains(index);
  }

  void Register(int deoptimization_index,
                int translation_index,
                int pc_offset) {
    ASSERT(!HasBeenRegistered());
    deoptimization_index_ = deoptimization_index;
    translation_index_ = translation_index;
    pc_offset_ = pc_offset;
  }
  bool HasBeenRegistered() const {
    return deoptimization_index_ != Safepoint::kNoDeoptimizationIndex;
  }

  void SetSpilledRegisters(LOperand** registers,
                           LOperand** double_registers) {
    spilled_registers_ = registers;
    spilled_double_registers_ = double_registers;
  }

  void PrintTo(StringStream* stream);

 private:
  Handle<JSFunction> closure_;
  FrameType frame_type_;
  int arguments_stack_height_;
  int deoptimization_index_;
  int translation_index_;
  int ast_id_;
  int parameter_count_;
  int pc_offset_;
  ZoneList<LOperand*> values_;
  BitVector is_tagged_;

  // Arrays of spill slot operands, indexed by allocation index, for
  // registers that are also in spill slots at an OSR entry.  NULL for
  // environments that do not correspond to an OSR entry.
  LOperand** spilled_registers_;
  LOperand** spilled_double_registers_;

  LEnvironment* outer_;
};

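// Illustrative sketch of how an LEnvironment is typically filled in
// (identifiers here are placeholders, not declarations from this header):
//
//   LEnvironment* env = new LEnvironment(closure, frame_type, ast_id,
//                                        param_count, arg_count, value_count,
//                                        outer_env);
//   env->AddValue(op, Representation::Tagged());  // remembered in is_tagged_
//   env->Register(deopt_index, translation_index, pc_offset);  // once only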

// Iterates over the non-null, non-constant operands in an environment.
class ShallowIterator BASE_EMBEDDED {
 public:
  explicit ShallowIterator(LEnvironment* env)
      : env_(env),
        limit_(env != NULL ? env->values()->length() : 0),
        current_(0) {
    SkipUninteresting();
  }

  bool Done() { return current_ >= limit_; }

  LOperand* Current() {
    ASSERT(!Done());
    return env_->values()->at(current_);
  }

  void Advance() {
    ASSERT(!Done());
    ++current_;
    SkipUninteresting();
  }

  LEnvironment* env() { return env_; }

 private:
  bool ShouldSkip(LOperand* op) {
    return op == NULL || op->IsConstantOperand() || op->IsArgument();
  }

  // Skip until something interesting, beginning with and including current_.
  void SkipUninteresting() {
    while (current_ < limit_ && ShouldSkip(env_->values()->at(current_))) {
      ++current_;
    }
  }

  LEnvironment* env_;
  int limit_;
  int current_;
};


// Iterator for non-null, non-constant operands incl. outer environments.
class DeepIterator BASE_EMBEDDED {
 public:
  explicit DeepIterator(LEnvironment* env)
      : current_iterator_(env) {
    SkipUninteresting();
  }

  bool Done() { return current_iterator_.Done(); }

  LOperand* Current() {
    ASSERT(!current_iterator_.Done());
    return current_iterator_.Current();
  }

  void Advance() {
    current_iterator_.Advance();
    SkipUninteresting();
  }

 private:
  void SkipUninteresting() {
    while (current_iterator_.env() != NULL && current_iterator_.Done()) {
      current_iterator_ = ShallowIterator(current_iterator_.env()->outer());
    }
  }

  ShallowIterator current_iterator_;
};

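// Illustrative sketch: visiting every interesting operand in an environment
// chain with the iterators above (|env| is any LEnvironment*, possibly with
// outer environments):
//
//   for (DeepIterator it(env); !it.Done(); it.Advance()) {
//     LOperand* op = it.Current();
//     // op is non-NULL and neither a constant nor an argument operand.
//   }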

int ElementsKindToShiftSize(ElementsKind elements_kind);


} }  // namespace v8::internal

#endif  // V8_LITHIUM_H_